code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def attr_matches(self, text):
    """Compute matches when text contains a dot.

    Assuming the text is of the form NAME.NAME....[NAME], and is
    evaluatable in self.namespace or self.global_namespace, it will be
    evaluated and its attributes (as revealed by dir()) are used as
    possible completions. (For class instances, class members are
    also considered.)

    WARNING: this can still invoke arbitrary C code, if an object
    with a __getattr__ hook is evaluated.
    """
    import re
    # Another option, seems to work great. Catches things like ''.<tab>
    m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text) # @UndefinedVariable
    if not m:
        return []
    # Group 1 is the object expression before the final dot; group 3 is
    # the (possibly empty) attribute prefix typed so far.
    expr, attr = m.group(1, 3)
    try:
        # SECURITY NOTE: eval of user-typed text can execute arbitrary
        # code (property getters, __getattr__, ...); this is inherent to
        # attribute completion and acknowledged in the docstring above.
        obj = eval(expr, self.namespace)
    except:
        # NOTE(review): bare except deliberately swallows any evaluation
        # failure and retries in the global namespace.
        try:
            obj = eval(expr, self.global_namespace)
        except:
            # Not evaluatable in either namespace: no completions.
            return []
    # NOTE(review): 'filter' shadows the builtin of the same name.
    filter = _StartsWithFilter(attr)
    words = dir2(obj, filter=filter)
return words | def function[attr_matches, parameter[self, text]]:
constant[Compute matches when text contains a dot.
Assuming the text is of the form NAME.NAME....[NAME], and is
evaluatable in self.namespace or self.global_namespace, it will be
evaluated and its attributes (as revealed by dir()) are used as
possible completions. (For class instances, class members are are
also considered.)
WARNING: this can still invoke arbitrary C code, if an object
with a __getattr__ hook is evaluated.
]
import module[re]
variable[m] assign[=] call[name[re].match, parameter[constant[(\S+(\.\w+)*)\.(\w*)$], name[text]]]
if <ast.UnaryOp object at 0x7da20c6a8b80> begin[:]
return[list[[]]]
<ast.Tuple object at 0x7da20c6aa470> assign[=] call[name[m].group, parameter[constant[1], constant[3]]]
<ast.Try object at 0x7da20c6abb20>
variable[filter] assign[=] call[name[_StartsWithFilter], parameter[name[attr]]]
variable[words] assign[=] call[name[dir2], parameter[name[obj]]]
return[name[words]] | keyword[def] identifier[attr_matches] ( identifier[self] , identifier[text] ):
literal[string]
keyword[import] identifier[re]
identifier[m] = identifier[re] . identifier[match] ( literal[string] , identifier[text] )
keyword[if] keyword[not] identifier[m] :
keyword[return] []
identifier[expr] , identifier[attr] = identifier[m] . identifier[group] ( literal[int] , literal[int] )
keyword[try] :
identifier[obj] = identifier[eval] ( identifier[expr] , identifier[self] . identifier[namespace] )
keyword[except] :
keyword[try] :
identifier[obj] = identifier[eval] ( identifier[expr] , identifier[self] . identifier[global_namespace] )
keyword[except] :
keyword[return] []
identifier[filter] = identifier[_StartsWithFilter] ( identifier[attr] )
identifier[words] = identifier[dir2] ( identifier[obj] , identifier[filter] = identifier[filter] )
keyword[return] identifier[words] | def attr_matches(self, text):
"""Compute matches when text contains a dot.
Assuming the text is of the form NAME.NAME....[NAME], and is
evaluatable in self.namespace or self.global_namespace, it will be
evaluated and its attributes (as revealed by dir()) are used as
possible completions. (For class instances, class members are are
also considered.)
WARNING: this can still invoke arbitrary C code, if an object
with a __getattr__ hook is evaluated.
"""
import re
# Another option, seems to work great. Catches things like ''.<tab>
m = re.match('(\\S+(\\.\\w+)*)\\.(\\w*)$', text) # @UndefinedVariable
if not m:
return [] # depends on [control=['if'], data=[]]
(expr, attr) = m.group(1, 3)
try:
obj = eval(expr, self.namespace) # depends on [control=['try'], data=[]]
except:
try:
obj = eval(expr, self.global_namespace) # depends on [control=['try'], data=[]]
except:
return [] # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
filter = _StartsWithFilter(attr)
words = dir2(obj, filter=filter)
return words |
def _R2deriv(self,R,z,phi=0.,t=0.):
        """
        NAME:
           _R2deriv
        PURPOSE:
           evaluate the second radial derivative for this potential
        INPUT:
           R - Galactocentric cylindrical radius
           z - vertical height
           phi - azimuth (not used by this potential)
           t - time (not used by this potential)
        OUTPUT:
           the second radial derivative
        HISTORY:
           2016-05-13 - Written - Aladdin
        """
return self._denom(R, z)**-1.5 - 3.*R**2 * self._denom(R, z)**-2.5 | def function[_R2deriv, parameter[self, R, z, phi, t]]:
constant[
NAME:
_Rforce
PURPOSE:
evaluate the second radial derivative for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
the second radial derivative
HISTORY:
2016-05-13 - Written - Aladdin
]
return[binary_operation[binary_operation[call[name[self]._denom, parameter[name[R], name[z]]] ** <ast.UnaryOp object at 0x7da18f812a70>] - binary_operation[binary_operation[constant[3.0] * binary_operation[name[R] ** constant[2]]] * binary_operation[call[name[self]._denom, parameter[name[R], name[z]]] ** <ast.UnaryOp object at 0x7da18f812e30>]]]] | keyword[def] identifier[_R2deriv] ( identifier[self] , identifier[R] , identifier[z] , identifier[phi] = literal[int] , identifier[t] = literal[int] ):
literal[string]
keyword[return] identifier[self] . identifier[_denom] ( identifier[R] , identifier[z] )**- literal[int] - literal[int] * identifier[R] ** literal[int] * identifier[self] . identifier[_denom] ( identifier[R] , identifier[z] )**- literal[int] | def _R2deriv(self, R, z, phi=0.0, t=0.0):
"""
NAME:
_Rforce
PURPOSE:
evaluate the second radial derivative for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
the second radial derivative
HISTORY:
2016-05-13 - Written - Aladdin
"""
return self._denom(R, z) ** (-1.5) - 3.0 * R ** 2 * self._denom(R, z) ** (-2.5) |
def get_blocks_overview(block_representation_list, coin_symbol='btc', txn_limit=None, api_key=None):
    '''
    Batch request version of get_block_overview: fetch overview data for
    several blocks in a single API call.

    :param block_representation_list: list of block heights and/or hashes
    :param coin_symbol: coin/chain to query (default 'btc')
    :param txn_limit: optional cap on transactions returned per block
    :param api_key: optional API token, sent as the 'token' query param
    :return: list of cleaned block dicts, one per requested block
    '''
    # NOTE(review): asserts are stripped when Python runs with -O, so this
    # input validation silently disappears under optimized bytecode.
    for block_representation in block_representation_list:
        assert is_valid_block_representation(
            block_representation=block_representation,
            coin_symbol=coin_symbol)
    assert is_valid_coin_symbol(coin_symbol)
    # Batch syntax: multiple block identifiers joined with ';' in one URL.
    blocks = ';'.join([str(x) for x in block_representation_list])
    url = make_url(coin_symbol, **dict(blocks=blocks))
    logger.info(url)
    params = {}
    if api_key:
        params['token'] = api_key
    if txn_limit:
        params['limit'] = txn_limit
    r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
    r = get_valid_json(r)
return [_clean_tx(response_dict=d) for d in r] | def function[get_blocks_overview, parameter[block_representation_list, coin_symbol, txn_limit, api_key]]:
constant[
Batch request version of get_blocks_overview
]
for taget[name[block_representation]] in starred[name[block_representation_list]] begin[:]
assert[call[name[is_valid_block_representation], parameter[]]]
assert[call[name[is_valid_coin_symbol], parameter[name[coin_symbol]]]]
variable[blocks] assign[=] call[constant[;].join, parameter[<ast.ListComp object at 0x7da18bccb430>]]
variable[url] assign[=] call[name[make_url], parameter[name[coin_symbol]]]
call[name[logger].info, parameter[name[url]]]
variable[params] assign[=] dictionary[[], []]
if name[api_key] begin[:]
call[name[params]][constant[token]] assign[=] name[api_key]
if name[txn_limit] begin[:]
call[name[params]][constant[limit]] assign[=] name[txn_limit]
variable[r] assign[=] call[name[requests].get, parameter[name[url]]]
variable[r] assign[=] call[name[get_valid_json], parameter[name[r]]]
return[<ast.ListComp object at 0x7da18bcc81f0>] | keyword[def] identifier[get_blocks_overview] ( identifier[block_representation_list] , identifier[coin_symbol] = literal[string] , identifier[txn_limit] = keyword[None] , identifier[api_key] = keyword[None] ):
literal[string]
keyword[for] identifier[block_representation] keyword[in] identifier[block_representation_list] :
keyword[assert] identifier[is_valid_block_representation] (
identifier[block_representation] = identifier[block_representation] ,
identifier[coin_symbol] = identifier[coin_symbol] )
keyword[assert] identifier[is_valid_coin_symbol] ( identifier[coin_symbol] )
identifier[blocks] = literal[string] . identifier[join] ([ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[block_representation_list] ])
identifier[url] = identifier[make_url] ( identifier[coin_symbol] ,** identifier[dict] ( identifier[blocks] = identifier[blocks] ))
identifier[logger] . identifier[info] ( identifier[url] )
identifier[params] ={}
keyword[if] identifier[api_key] :
identifier[params] [ literal[string] ]= identifier[api_key]
keyword[if] identifier[txn_limit] :
identifier[params] [ literal[string] ]= identifier[txn_limit]
identifier[r] = identifier[requests] . identifier[get] ( identifier[url] , identifier[params] = identifier[params] , identifier[verify] = keyword[True] , identifier[timeout] = identifier[TIMEOUT_IN_SECONDS] )
identifier[r] = identifier[get_valid_json] ( identifier[r] )
keyword[return] [ identifier[_clean_tx] ( identifier[response_dict] = identifier[d] ) keyword[for] identifier[d] keyword[in] identifier[r] ] | def get_blocks_overview(block_representation_list, coin_symbol='btc', txn_limit=None, api_key=None):
"""
Batch request version of get_blocks_overview
"""
for block_representation in block_representation_list:
assert is_valid_block_representation(block_representation=block_representation, coin_symbol=coin_symbol) # depends on [control=['for'], data=['block_representation']]
assert is_valid_coin_symbol(coin_symbol)
blocks = ';'.join([str(x) for x in block_representation_list])
url = make_url(coin_symbol, **dict(blocks=blocks))
logger.info(url)
params = {}
if api_key:
params['token'] = api_key # depends on [control=['if'], data=[]]
if txn_limit:
params['limit'] = txn_limit # depends on [control=['if'], data=[]]
r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
r = get_valid_json(r)
return [_clean_tx(response_dict=d) for d in r] |
def red_get_mount_connectors(red_data, ignore_outputs):
    """
    Returns a list of mounting connectors.

    :param red_data: The red data to be searched
    :param ignore_outputs: If outputs should be ignored
    :return: A list of connectors with active mount option.
    """
    keys = []
    batches = red_data.get('batches')
    inputs = red_data.get('inputs')
    # The red data carries either top-level 'batches' (each with its own
    # inputs/outputs) or a single top-level 'inputs'/'outputs' section;
    # 'batches' takes precedence when both are present.
    if batches:
        for batch in batches:
            keys.extend(red_get_mount_connectors_from_inputs(batch['inputs']))
    elif inputs:
        keys.extend(red_get_mount_connectors_from_inputs(inputs))
    if not ignore_outputs:
        outputs = red_data.get('outputs')
        if batches:
            for batch in batches:
                # Outputs are optional per batch (batch.get), unlike the
                # mandatory batch['inputs'] above.
                batch_outputs = batch.get('outputs')
                if batch_outputs:
                    keys.extend(red_get_mount_connectors_from_outputs(batch_outputs))
        elif outputs:
            keys.extend(red_get_mount_connectors_from_outputs(outputs))
return keys | def function[red_get_mount_connectors, parameter[red_data, ignore_outputs]]:
constant[
Returns a list of mounting connectors
:param red_data: The red data to be searched
:param ignore_outputs: If outputs should be ignored
:return: A list of connectors with active mount option.
]
variable[keys] assign[=] list[[]]
variable[batches] assign[=] call[name[red_data].get, parameter[constant[batches]]]
variable[inputs] assign[=] call[name[red_data].get, parameter[constant[inputs]]]
if name[batches] begin[:]
for taget[name[batch]] in starred[name[batches]] begin[:]
call[name[keys].extend, parameter[call[name[red_get_mount_connectors_from_inputs], parameter[call[name[batch]][constant[inputs]]]]]]
if <ast.UnaryOp object at 0x7da2041da830> begin[:]
variable[outputs] assign[=] call[name[red_data].get, parameter[constant[outputs]]]
if name[batches] begin[:]
for taget[name[batch]] in starred[name[batches]] begin[:]
variable[batch_outputs] assign[=] call[name[batch].get, parameter[constant[outputs]]]
if name[batch_outputs] begin[:]
call[name[keys].extend, parameter[call[name[red_get_mount_connectors_from_outputs], parameter[name[batch_outputs]]]]]
return[name[keys]] | keyword[def] identifier[red_get_mount_connectors] ( identifier[red_data] , identifier[ignore_outputs] ):
literal[string]
identifier[keys] =[]
identifier[batches] = identifier[red_data] . identifier[get] ( literal[string] )
identifier[inputs] = identifier[red_data] . identifier[get] ( literal[string] )
keyword[if] identifier[batches] :
keyword[for] identifier[batch] keyword[in] identifier[batches] :
identifier[keys] . identifier[extend] ( identifier[red_get_mount_connectors_from_inputs] ( identifier[batch] [ literal[string] ]))
keyword[elif] identifier[inputs] :
identifier[keys] . identifier[extend] ( identifier[red_get_mount_connectors_from_inputs] ( identifier[inputs] ))
keyword[if] keyword[not] identifier[ignore_outputs] :
identifier[outputs] = identifier[red_data] . identifier[get] ( literal[string] )
keyword[if] identifier[batches] :
keyword[for] identifier[batch] keyword[in] identifier[batches] :
identifier[batch_outputs] = identifier[batch] . identifier[get] ( literal[string] )
keyword[if] identifier[batch_outputs] :
identifier[keys] . identifier[extend] ( identifier[red_get_mount_connectors_from_outputs] ( identifier[batch_outputs] ))
keyword[elif] identifier[outputs] :
identifier[keys] . identifier[extend] ( identifier[red_get_mount_connectors_from_outputs] ( identifier[outputs] ))
keyword[return] identifier[keys] | def red_get_mount_connectors(red_data, ignore_outputs):
"""
Returns a list of mounting connectors
:param red_data: The red data to be searched
:param ignore_outputs: If outputs should be ignored
:return: A list of connectors with active mount option.
"""
keys = []
batches = red_data.get('batches')
inputs = red_data.get('inputs')
if batches:
for batch in batches:
keys.extend(red_get_mount_connectors_from_inputs(batch['inputs'])) # depends on [control=['for'], data=['batch']] # depends on [control=['if'], data=[]]
elif inputs:
keys.extend(red_get_mount_connectors_from_inputs(inputs)) # depends on [control=['if'], data=[]]
if not ignore_outputs:
outputs = red_data.get('outputs')
if batches:
for batch in batches:
batch_outputs = batch.get('outputs')
if batch_outputs:
keys.extend(red_get_mount_connectors_from_outputs(batch_outputs)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['batch']] # depends on [control=['if'], data=[]]
elif outputs:
keys.extend(red_get_mount_connectors_from_outputs(outputs)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return keys |
def splitext(self):
    """ p.splitext() -> Return (p.stripext(), p.ext).

    Split the filename extension from this path and return
    the two parts. Either part may be empty.

    The extension is everything from '.' to the end of the
    last path segment. This has the property that if
    (a, b) == p.splitext(), then a + b == p.
    """
    # os.path.splitext does the parsing; the stem is then re-wrapped in
    # this path class (self.__class__) so chained path operations keep
    # working, while the extension stays a plain string.
    filename, ext = os.path.splitext(self)
return self.__class__(filename), ext | def function[splitext, parameter[self]]:
constant[ p.splitext() -> Return (p.stripext(), p.ext).
Split the filename extension from this path and return
the two parts. Either part may be empty.
The extension is everything from '.' to the end of the
last path segment. This has the property that if
(a, b) == p.splitext(), then a + b == p.
]
<ast.Tuple object at 0x7da1b242b460> assign[=] call[name[os].path.splitext, parameter[name[self]]]
return[tuple[[<ast.Call object at 0x7da1b242b040>, <ast.Name object at 0x7da1b242b400>]]] | keyword[def] identifier[splitext] ( identifier[self] ):
literal[string]
identifier[filename] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[self] )
keyword[return] identifier[self] . identifier[__class__] ( identifier[filename] ), identifier[ext] | def splitext(self):
""" p.splitext() -> Return (p.stripext(), p.ext).
Split the filename extension from this path and return
the two parts. Either part may be empty.
The extension is everything from '.' to the end of the
last path segment. This has the property that if
(a, b) == p.splitext(), then a + b == p.
"""
(filename, ext) = os.path.splitext(self)
return (self.__class__(filename), ext) |
def find_element(self, by, value, view_cls=None):
    # type: (By, Any, View) -> View
    # NOTE(review): view_cls is really an optional *class* (Type[View]),
    # not a View instance -- the type comment understates this.
    """
    Find one element matching condition.

    :param by: Type of condition
    :param value: Condition value
    :param view_cls: Optional custom class to wrap returned elements
                     (defaults to View)
    :return: Matching element wrapped in a view
    """
    if view_cls is None:
        view_cls = View
    # The lookup is wrapped in a lambda, so the element is re-found
    # lazily each time the returned view resolves it.
return view_cls(lambda: self.root.find_element(by, value)) | def function[find_element, parameter[self, by, value, view_cls]]:
constant[
Find one element matching condition
:param by: Type of condition
:param value: Condition value
:param view_cls: Optional custom class to wrap returned elements
:return: Matching element wrapped in a view
]
if compare[name[view_cls] is constant[None]] begin[:]
variable[view_cls] assign[=] name[View]
return[call[name[view_cls], parameter[<ast.Lambda object at 0x7da1b14729e0>]]] | keyword[def] identifier[find_element] ( identifier[self] , identifier[by] , identifier[value] , identifier[view_cls] = keyword[None] ):
literal[string]
keyword[if] identifier[view_cls] keyword[is] keyword[None] :
identifier[view_cls] = identifier[View]
keyword[return] identifier[view_cls] ( keyword[lambda] : identifier[self] . identifier[root] . identifier[find_element] ( identifier[by] , identifier[value] )) | def find_element(self, by, value, view_cls=None):
# type: (By, Any, View) -> View
'\n Find one element matching condition\n :param by: Type of condition\n :param value: Condition value\n :param view_cls: Optional custom class to wrap returned elements\n :return: Matching element wrapped in a view\n '
if view_cls is None:
view_cls = View # depends on [control=['if'], data=['view_cls']]
return view_cls(lambda : self.root.find_element(by, value)) |
def cycle_file(source_plaintext_filename):
    """Encrypts and then decrypts a file under a custom static master key provider.

    :param str source_plaintext_filename: Filename of file to encrypt
    :return: tuple of (ciphertext filename, cycled-plaintext filename)
    """
    # Create a static random master key provider
    key_id = os.urandom(8)
    master_key_provider = StaticRandomMasterKeyProvider()
    master_key_provider.add_master_key(key_id)
    ciphertext_filename = source_plaintext_filename + ".encrypted"
    cycled_plaintext_filename = source_plaintext_filename + ".decrypted"
    # Encrypt the plaintext source data (streamed chunk by chunk, so the
    # whole file is never held in memory at once)
    with open(source_plaintext_filename, "rb") as plaintext, open(ciphertext_filename, "wb") as ciphertext:
        with aws_encryption_sdk.stream(mode="e", source=plaintext, key_provider=master_key_provider) as encryptor:
            for chunk in encryptor:
                ciphertext.write(chunk)
    # Decrypt the ciphertext
    with open(ciphertext_filename, "rb") as ciphertext, open(cycled_plaintext_filename, "wb") as plaintext:
        with aws_encryption_sdk.stream(mode="d", source=ciphertext, key_provider=master_key_provider) as decryptor:
            for chunk in decryptor:
                plaintext.write(chunk)
    # Verify that the "cycled" (encrypted, then decrypted) plaintext is identical to the source
    # plaintext.
    # NOTE(review): asserts are stripped under python -O; acceptable for a
    # sample/demo, but real verification should raise instead.
    assert filecmp.cmp(source_plaintext_filename, cycled_plaintext_filename)
    # Verify that the encryption context used in the decrypt operation includes all key pairs from
    # the encrypt operation. (encryptor/decryptor remain bound after their
    # 'with' blocks exit -- 'with' does not introduce a new scope.)
    #
    # In production, always use a meaningful encryption context. In this sample, we omit the
    # encryption context (no key pairs).
    assert all(
        pair in decryptor.header.encryption_context.items() for pair in encryptor.header.encryption_context.items()
    )
return ciphertext_filename, cycled_plaintext_filename | def function[cycle_file, parameter[source_plaintext_filename]]:
constant[Encrypts and then decrypts a file under a custom static master key provider.
:param str source_plaintext_filename: Filename of file to encrypt
]
variable[key_id] assign[=] call[name[os].urandom, parameter[constant[8]]]
variable[master_key_provider] assign[=] call[name[StaticRandomMasterKeyProvider], parameter[]]
call[name[master_key_provider].add_master_key, parameter[name[key_id]]]
variable[ciphertext_filename] assign[=] binary_operation[name[source_plaintext_filename] + constant[.encrypted]]
variable[cycled_plaintext_filename] assign[=] binary_operation[name[source_plaintext_filename] + constant[.decrypted]]
with call[name[open], parameter[name[source_plaintext_filename], constant[rb]]] begin[:]
with call[name[aws_encryption_sdk].stream, parameter[]] begin[:]
for taget[name[chunk]] in starred[name[encryptor]] begin[:]
call[name[ciphertext].write, parameter[name[chunk]]]
with call[name[open], parameter[name[ciphertext_filename], constant[rb]]] begin[:]
with call[name[aws_encryption_sdk].stream, parameter[]] begin[:]
for taget[name[chunk]] in starred[name[decryptor]] begin[:]
call[name[plaintext].write, parameter[name[chunk]]]
assert[call[name[filecmp].cmp, parameter[name[source_plaintext_filename], name[cycled_plaintext_filename]]]]
assert[call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b26ac7c0>]]]
return[tuple[[<ast.Name object at 0x7da2054a61a0>, <ast.Name object at 0x7da2054a5c60>]]] | keyword[def] identifier[cycle_file] ( identifier[source_plaintext_filename] ):
literal[string]
identifier[key_id] = identifier[os] . identifier[urandom] ( literal[int] )
identifier[master_key_provider] = identifier[StaticRandomMasterKeyProvider] ()
identifier[master_key_provider] . identifier[add_master_key] ( identifier[key_id] )
identifier[ciphertext_filename] = identifier[source_plaintext_filename] + literal[string]
identifier[cycled_plaintext_filename] = identifier[source_plaintext_filename] + literal[string]
keyword[with] identifier[open] ( identifier[source_plaintext_filename] , literal[string] ) keyword[as] identifier[plaintext] , identifier[open] ( identifier[ciphertext_filename] , literal[string] ) keyword[as] identifier[ciphertext] :
keyword[with] identifier[aws_encryption_sdk] . identifier[stream] ( identifier[mode] = literal[string] , identifier[source] = identifier[plaintext] , identifier[key_provider] = identifier[master_key_provider] ) keyword[as] identifier[encryptor] :
keyword[for] identifier[chunk] keyword[in] identifier[encryptor] :
identifier[ciphertext] . identifier[write] ( identifier[chunk] )
keyword[with] identifier[open] ( identifier[ciphertext_filename] , literal[string] ) keyword[as] identifier[ciphertext] , identifier[open] ( identifier[cycled_plaintext_filename] , literal[string] ) keyword[as] identifier[plaintext] :
keyword[with] identifier[aws_encryption_sdk] . identifier[stream] ( identifier[mode] = literal[string] , identifier[source] = identifier[ciphertext] , identifier[key_provider] = identifier[master_key_provider] ) keyword[as] identifier[decryptor] :
keyword[for] identifier[chunk] keyword[in] identifier[decryptor] :
identifier[plaintext] . identifier[write] ( identifier[chunk] )
keyword[assert] identifier[filecmp] . identifier[cmp] ( identifier[source_plaintext_filename] , identifier[cycled_plaintext_filename] )
keyword[assert] identifier[all] (
identifier[pair] keyword[in] identifier[decryptor] . identifier[header] . identifier[encryption_context] . identifier[items] () keyword[for] identifier[pair] keyword[in] identifier[encryptor] . identifier[header] . identifier[encryption_context] . identifier[items] ()
)
keyword[return] identifier[ciphertext_filename] , identifier[cycled_plaintext_filename] | def cycle_file(source_plaintext_filename):
"""Encrypts and then decrypts a file under a custom static master key provider.
:param str source_plaintext_filename: Filename of file to encrypt
"""
# Create a static random master key provider
key_id = os.urandom(8)
master_key_provider = StaticRandomMasterKeyProvider()
master_key_provider.add_master_key(key_id)
ciphertext_filename = source_plaintext_filename + '.encrypted'
cycled_plaintext_filename = source_plaintext_filename + '.decrypted'
# Encrypt the plaintext source data
with open(source_plaintext_filename, 'rb') as plaintext, open(ciphertext_filename, 'wb') as ciphertext:
with aws_encryption_sdk.stream(mode='e', source=plaintext, key_provider=master_key_provider) as encryptor:
for chunk in encryptor:
ciphertext.write(chunk) # depends on [control=['for'], data=['chunk']] # depends on [control=['with'], data=['encryptor']] # depends on [control=['with'], data=['plaintext']]
# Decrypt the ciphertext
with open(ciphertext_filename, 'rb') as ciphertext, open(cycled_plaintext_filename, 'wb') as plaintext:
with aws_encryption_sdk.stream(mode='d', source=ciphertext, key_provider=master_key_provider) as decryptor:
for chunk in decryptor:
plaintext.write(chunk) # depends on [control=['for'], data=['chunk']] # depends on [control=['with'], data=['decryptor']] # depends on [control=['with'], data=['ciphertext']]
# Verify that the "cycled" (encrypted, then decrypted) plaintext is identical to the source
# plaintext
assert filecmp.cmp(source_plaintext_filename, cycled_plaintext_filename)
# Verify that the encryption context used in the decrypt operation includes all key pairs from
# the encrypt operation
#
# In production, always use a meaningful encryption context. In this sample, we omit the
# encryption context (no key pairs).
assert all((pair in decryptor.header.encryption_context.items() for pair in encryptor.header.encryption_context.items()))
return (ciphertext_filename, cycled_plaintext_filename) |
def readLocationElement(self, locationElement):
        """ Format 0 location reader.

        Parse the <dimension> children of *locationElement* into a
        Location mapping dimension name -> value, where the value is a
        float, or an (x, y) tuple when a 'yvalue' attribute is present.
        """
        loc = Location()
        for dimensionElement in locationElement.findall(".dimension"):
            dimName = dimensionElement.attrib.get("name")
            xValue = yValue = None
            try:
                xValue = dimensionElement.attrib.get('xvalue')
                xValue = float(xValue)
            except ValueError:
                # NOTE(review): when float() fails, xValue is still the raw
                # attribute string (or None), so the %3.3f format below will
                # itself raise TypeError -- looks like a latent bug; confirm.
                if self.logger:
                    self.logger.info("KeyError in readLocation xValue %3.3f", xValue)
            try:
                yValue = dimensionElement.attrib.get('yvalue')
                if yValue is not None:
                    yValue = float(yValue)
            except ValueError:
                # NOTE(review): a non-numeric 'yvalue' is silently kept as a
                # string and stored in the tuple below -- verify intended.
                pass
            # Two-axis (anisotropic) dimensions store an (x, y) pair;
            # plain dimensions store just the x value.
            if yValue is not None:
                loc[dimName] = (xValue, yValue)
            else:
                loc[dimName] = xValue
return loc | def function[readLocationElement, parameter[self, locationElement]]:
constant[ Format 0 location reader ]
variable[loc] assign[=] call[name[Location], parameter[]]
for taget[name[dimensionElement]] in starred[call[name[locationElement].findall, parameter[constant[.dimension]]]] begin[:]
variable[dimName] assign[=] call[name[dimensionElement].attrib.get, parameter[constant[name]]]
variable[xValue] assign[=] constant[None]
<ast.Try object at 0x7da2047eb160>
<ast.Try object at 0x7da2047e8370>
if compare[name[yValue] is_not constant[None]] begin[:]
call[name[loc]][name[dimName]] assign[=] tuple[[<ast.Name object at 0x7da1b0b298a0>, <ast.Name object at 0x7da1b0b2b520>]]
return[name[loc]] | keyword[def] identifier[readLocationElement] ( identifier[self] , identifier[locationElement] ):
literal[string]
identifier[loc] = identifier[Location] ()
keyword[for] identifier[dimensionElement] keyword[in] identifier[locationElement] . identifier[findall] ( literal[string] ):
identifier[dimName] = identifier[dimensionElement] . identifier[attrib] . identifier[get] ( literal[string] )
identifier[xValue] = identifier[yValue] = keyword[None]
keyword[try] :
identifier[xValue] = identifier[dimensionElement] . identifier[attrib] . identifier[get] ( literal[string] )
identifier[xValue] = identifier[float] ( identifier[xValue] )
keyword[except] identifier[ValueError] :
keyword[if] identifier[self] . identifier[logger] :
identifier[self] . identifier[logger] . identifier[info] ( literal[string] , identifier[xValue] )
keyword[try] :
identifier[yValue] = identifier[dimensionElement] . identifier[attrib] . identifier[get] ( literal[string] )
keyword[if] identifier[yValue] keyword[is] keyword[not] keyword[None] :
identifier[yValue] = identifier[float] ( identifier[yValue] )
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[if] identifier[yValue] keyword[is] keyword[not] keyword[None] :
identifier[loc] [ identifier[dimName] ]=( identifier[xValue] , identifier[yValue] )
keyword[else] :
identifier[loc] [ identifier[dimName] ]= identifier[xValue]
keyword[return] identifier[loc] | def readLocationElement(self, locationElement):
""" Format 0 location reader """
loc = Location()
for dimensionElement in locationElement.findall('.dimension'):
dimName = dimensionElement.attrib.get('name')
xValue = yValue = None
try:
xValue = dimensionElement.attrib.get('xvalue')
xValue = float(xValue) # depends on [control=['try'], data=[]]
except ValueError:
if self.logger:
self.logger.info('KeyError in readLocation xValue %3.3f', xValue) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
try:
yValue = dimensionElement.attrib.get('yvalue')
if yValue is not None:
yValue = float(yValue) # depends on [control=['if'], data=['yValue']] # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]]
if yValue is not None:
loc[dimName] = (xValue, yValue) # depends on [control=['if'], data=['yValue']]
else:
loc[dimName] = xValue # depends on [control=['for'], data=['dimensionElement']]
return loc |
def invalid_example_number(region_code):
"""Gets an invalid number for the specified region.
This is useful for unit-testing purposes, where you want to test what
will happen with an invalid number. Note that the number that is
returned will always be able to be parsed and will have the correct
country code. It may also be a valid *short* number/code for this
region. Validity checking such numbers is handled with shortnumberinfo.
Arguments:
region_code -- The region for which an example number is needed.
Returns an invalid number for the specified region. Returns None when an
unsupported region or the region 001 (Earth) is passed in.
"""
if not _is_valid_region_code(region_code):
return None
# We start off with a valid fixed-line number since every country
# supports this. Alternatively we could start with a different number
# type, since fixed-line numbers typically have a wide breadth of valid
# number lengths and we may have to make it very short before we get an
# invalid number.
metadata = PhoneMetadata.metadata_for_region(region_code.upper())
desc = _number_desc_by_type(metadata, PhoneNumberType.FIXED_LINE)
if desc is None or desc.example_number is None:
# This shouldn't happen; we have a test for this.
return None # pragma no cover
example_number = desc.example_number
# Try and make the number invalid. We do this by changing the length. We
# try reducing the length of the number, since currently no region has a
# number that is the same length as MIN_LENGTH_FOR_NSN. This is probably
# quicker than making the number longer, which is another
# alternative. We could also use the possible number pattern to extract
# the possible lengths of the number to make this faster, but this
# method is only for unit-testing so simplicity is preferred to
# performance. We don't want to return a number that can't be parsed,
# so we check the number is long enough. We try all possible lengths
# because phone number plans often have overlapping prefixes so the
# number 123456 might be valid as a fixed-line number, and 12345 as a
# mobile number. It would be faster to loop in a different order, but we
# prefer numbers that look closer to real numbers (and it gives us a
# variety of different lengths for the resulting phone numbers -
# otherwise they would all be MIN_LENGTH_FOR_NSN digits long.)
phone_number_length = len(example_number) - 1
while phone_number_length >= _MIN_LENGTH_FOR_NSN:
number_to_try = example_number[:phone_number_length]
try:
possibly_valid_number = parse(number_to_try, region_code)
if not is_valid_number(possibly_valid_number):
return possibly_valid_number
except NumberParseException: # pragma no cover
# Shouldn't happen: we have already checked the length, we know
# example numbers have only valid digits, and we know the region
# code is fine.
pass
phone_number_length -= 1
# We have a test to check that this doesn't happen for any of our
# supported regions.
return None | def function[invalid_example_number, parameter[region_code]]:
constant[Gets an invalid number for the specified region.
This is useful for unit-testing purposes, where you want to test what
will happen with an invalid number. Note that the number that is
returned will always be able to be parsed and will have the correct
country code. It may also be a valid *short* number/code for this
region. Validity checking such numbers is handled with shortnumberinfo.
Arguments:
region_code -- The region for which an example number is needed.
Returns an invalid number for the specified region. Returns None when an
unsupported region or the region 001 (Earth) is passed in.
]
if <ast.UnaryOp object at 0x7da1b19ec910> begin[:]
return[constant[None]]
variable[metadata] assign[=] call[name[PhoneMetadata].metadata_for_region, parameter[call[name[region_code].upper, parameter[]]]]
variable[desc] assign[=] call[name[_number_desc_by_type], parameter[name[metadata], name[PhoneNumberType].FIXED_LINE]]
if <ast.BoolOp object at 0x7da1b19ed030> begin[:]
return[constant[None]]
variable[example_number] assign[=] name[desc].example_number
variable[phone_number_length] assign[=] binary_operation[call[name[len], parameter[name[example_number]]] - constant[1]]
while compare[name[phone_number_length] greater_or_equal[>=] name[_MIN_LENGTH_FOR_NSN]] begin[:]
variable[number_to_try] assign[=] call[name[example_number]][<ast.Slice object at 0x7da1b194ce80>]
<ast.Try object at 0x7da1b194e110>
<ast.AugAssign object at 0x7da1b194c280>
return[constant[None]] | keyword[def] identifier[invalid_example_number] ( identifier[region_code] ):
literal[string]
keyword[if] keyword[not] identifier[_is_valid_region_code] ( identifier[region_code] ):
keyword[return] keyword[None]
identifier[metadata] = identifier[PhoneMetadata] . identifier[metadata_for_region] ( identifier[region_code] . identifier[upper] ())
identifier[desc] = identifier[_number_desc_by_type] ( identifier[metadata] , identifier[PhoneNumberType] . identifier[FIXED_LINE] )
keyword[if] identifier[desc] keyword[is] keyword[None] keyword[or] identifier[desc] . identifier[example_number] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[example_number] = identifier[desc] . identifier[example_number]
identifier[phone_number_length] = identifier[len] ( identifier[example_number] )- literal[int]
keyword[while] identifier[phone_number_length] >= identifier[_MIN_LENGTH_FOR_NSN] :
identifier[number_to_try] = identifier[example_number] [: identifier[phone_number_length] ]
keyword[try] :
identifier[possibly_valid_number] = identifier[parse] ( identifier[number_to_try] , identifier[region_code] )
keyword[if] keyword[not] identifier[is_valid_number] ( identifier[possibly_valid_number] ):
keyword[return] identifier[possibly_valid_number]
keyword[except] identifier[NumberParseException] :
keyword[pass]
identifier[phone_number_length] -= literal[int]
keyword[return] keyword[None] | def invalid_example_number(region_code):
"""Gets an invalid number for the specified region.
This is useful for unit-testing purposes, where you want to test what
will happen with an invalid number. Note that the number that is
returned will always be able to be parsed and will have the correct
country code. It may also be a valid *short* number/code for this
region. Validity checking such numbers is handled with shortnumberinfo.
Arguments:
region_code -- The region for which an example number is needed.
Returns an invalid number for the specified region. Returns None when an
unsupported region or the region 001 (Earth) is passed in.
"""
if not _is_valid_region_code(region_code):
return None # depends on [control=['if'], data=[]]
# We start off with a valid fixed-line number since every country
# supports this. Alternatively we could start with a different number
# type, since fixed-line numbers typically have a wide breadth of valid
# number lengths and we may have to make it very short before we get an
# invalid number.
metadata = PhoneMetadata.metadata_for_region(region_code.upper())
desc = _number_desc_by_type(metadata, PhoneNumberType.FIXED_LINE)
if desc is None or desc.example_number is None:
# This shouldn't happen; we have a test for this.
return None # pragma no cover # depends on [control=['if'], data=[]]
example_number = desc.example_number
# Try and make the number invalid. We do this by changing the length. We
# try reducing the length of the number, since currently no region has a
# number that is the same length as MIN_LENGTH_FOR_NSN. This is probably
# quicker than making the number longer, which is another
# alternative. We could also use the possible number pattern to extract
# the possible lengths of the number to make this faster, but this
# method is only for unit-testing so simplicity is preferred to
# performance. We don't want to return a number that can't be parsed,
# so we check the number is long enough. We try all possible lengths
# because phone number plans often have overlapping prefixes so the
# number 123456 might be valid as a fixed-line number, and 12345 as a
# mobile number. It would be faster to loop in a different order, but we
# prefer numbers that look closer to real numbers (and it gives us a
# variety of different lengths for the resulting phone numbers -
# otherwise they would all be MIN_LENGTH_FOR_NSN digits long.)
phone_number_length = len(example_number) - 1
while phone_number_length >= _MIN_LENGTH_FOR_NSN:
number_to_try = example_number[:phone_number_length]
try:
possibly_valid_number = parse(number_to_try, region_code)
if not is_valid_number(possibly_valid_number):
return possibly_valid_number # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except NumberParseException: # pragma no cover
# Shouldn't happen: we have already checked the length, we know
# example numbers have only valid digits, and we know the region
# code is fine.
pass # depends on [control=['except'], data=[]]
phone_number_length -= 1 # depends on [control=['while'], data=['phone_number_length']]
# We have a test to check that this doesn't happen for any of our
# supported regions.
return None |
def download_file(self, file_path, range=None):
"""
Download a file from Telegram servers
"""
headers = {"range": range} if range else None
url = "{0}/file/bot{1}/{2}".format(API_URL, self.api_token, file_path)
return self.session.get(
url, headers=headers, proxy=self.proxy, proxy_auth=self.proxy_auth
) | def function[download_file, parameter[self, file_path, range]]:
constant[
Download a file from Telegram servers
]
variable[headers] assign[=] <ast.IfExp object at 0x7da18fe92290>
variable[url] assign[=] call[constant[{0}/file/bot{1}/{2}].format, parameter[name[API_URL], name[self].api_token, name[file_path]]]
return[call[name[self].session.get, parameter[name[url]]]] | keyword[def] identifier[download_file] ( identifier[self] , identifier[file_path] , identifier[range] = keyword[None] ):
literal[string]
identifier[headers] ={ literal[string] : identifier[range] } keyword[if] identifier[range] keyword[else] keyword[None]
identifier[url] = literal[string] . identifier[format] ( identifier[API_URL] , identifier[self] . identifier[api_token] , identifier[file_path] )
keyword[return] identifier[self] . identifier[session] . identifier[get] (
identifier[url] , identifier[headers] = identifier[headers] , identifier[proxy] = identifier[self] . identifier[proxy] , identifier[proxy_auth] = identifier[self] . identifier[proxy_auth]
) | def download_file(self, file_path, range=None):
"""
Download a file from Telegram servers
"""
headers = {'range': range} if range else None
url = '{0}/file/bot{1}/{2}'.format(API_URL, self.api_token, file_path)
return self.session.get(url, headers=headers, proxy=self.proxy, proxy_auth=self.proxy_auth) |
def replace_variable(self, variable):
"""Substitute variables with numeric values"""
if variable == 'x':
return self.value
if variable == 't':
return self.timedelta
raise ValueError("Invalid variable %s", variable) | def function[replace_variable, parameter[self, variable]]:
constant[Substitute variables with numeric values]
if compare[name[variable] equal[==] constant[x]] begin[:]
return[name[self].value]
if compare[name[variable] equal[==] constant[t]] begin[:]
return[name[self].timedelta]
<ast.Raise object at 0x7da18f09f340> | keyword[def] identifier[replace_variable] ( identifier[self] , identifier[variable] ):
literal[string]
keyword[if] identifier[variable] == literal[string] :
keyword[return] identifier[self] . identifier[value]
keyword[if] identifier[variable] == literal[string] :
keyword[return] identifier[self] . identifier[timedelta]
keyword[raise] identifier[ValueError] ( literal[string] , identifier[variable] ) | def replace_variable(self, variable):
"""Substitute variables with numeric values"""
if variable == 'x':
return self.value # depends on [control=['if'], data=[]]
if variable == 't':
return self.timedelta # depends on [control=['if'], data=[]]
raise ValueError('Invalid variable %s', variable) |
def resolve_operator(name):
"""
Get the :namedtuple:`wily.operators.Operator` for a given name.
:param name: The name of the operator
:return: The operator type
"""
if name.lower() in ALL_OPERATORS:
return ALL_OPERATORS[name.lower()]
else:
raise ValueError(f"Operator {name} not recognised.") | def function[resolve_operator, parameter[name]]:
constant[
Get the :namedtuple:`wily.operators.Operator` for a given name.
:param name: The name of the operator
:return: The operator type
]
if compare[call[name[name].lower, parameter[]] in name[ALL_OPERATORS]] begin[:]
return[call[name[ALL_OPERATORS]][call[name[name].lower, parameter[]]]] | keyword[def] identifier[resolve_operator] ( identifier[name] ):
literal[string]
keyword[if] identifier[name] . identifier[lower] () keyword[in] identifier[ALL_OPERATORS] :
keyword[return] identifier[ALL_OPERATORS] [ identifier[name] . identifier[lower] ()]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def resolve_operator(name):
"""
Get the :namedtuple:`wily.operators.Operator` for a given name.
:param name: The name of the operator
:return: The operator type
"""
if name.lower() in ALL_OPERATORS:
return ALL_OPERATORS[name.lower()] # depends on [control=['if'], data=['ALL_OPERATORS']]
else:
raise ValueError(f'Operator {name} not recognised.') |
def list_worktrees(cwd,
stale=False,
user=None,
password=None,
output_encoding=None,
**kwargs):
'''
.. versionadded:: 2015.8.0
Returns information on worktrees
.. versionchanged:: 2015.8.4
Version 2.7.0 added the ``list`` subcommand to `git-worktree(1)`_ which
provides a lot of additional information. The return data has been
changed to include this information, even for pre-2.7.0 versions of
git. In addition, if a worktree has a detached head, then any tags
which point to the worktree's HEAD will be included in the return data.
.. note::
By default, only worktrees for which the worktree directory is still
present are returned, but this can be changed using the ``all`` and
``stale`` arguments (described below).
cwd
The path to the git checkout
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
password
Windows only. Required when specifying ``user``. This parameter will be
ignored on non-Windows platforms.
.. versionadded:: 2016.3.4
all : False
If ``True``, then return all worktrees tracked under
$GIT_DIR/worktrees, including ones for which the gitdir is no longer
present.
stale : False
If ``True``, return *only* worktrees whose gitdir is no longer present.
.. note::
Only one of ``all`` and ``stale`` can be set to ``True``.
output_encoding
Use this option to specify which encoding to use to decode the output
from any git commands which are run. This should not be needed in most
cases.
.. note::
This should only be needed if the files in the repository were
created with filenames using an encoding other than UTF-8 to handle
Unicode characters.
.. versionadded:: 2018.3.1
.. _`git-worktree(1)`: http://git-scm.com/docs/git-worktree
CLI Examples:
.. code-block:: bash
salt myminion git.list_worktrees /path/to/repo
salt myminion git.list_worktrees /path/to/repo all=True
salt myminion git.list_worktrees /path/to/repo stale=True
'''
if not _check_worktree_support(failhard=True):
return {}
cwd = _expand_path(cwd, user)
kwargs = salt.utils.args.clean_kwargs(**kwargs)
all_ = kwargs.pop('all', False)
if kwargs:
salt.utils.args.invalid_kwargs(kwargs)
if all_ and stale:
raise CommandExecutionError(
'\'all\' and \'stale\' cannot both be set to True'
)
def _git_tag_points_at(cwd, rev, user=None, password=None,
output_encoding=None):
'''
Get any tags that point at a
'''
return _git_run(['git', 'tag', '--points-at', rev],
cwd=cwd,
user=user,
password=password,
output_encoding=output_encoding)['stdout'].splitlines()
def _desired(is_stale, all_, stale):
'''
Common logic to determine whether or not to include the worktree info
in the return data.
'''
if is_stale:
if not all_ and not stale:
# Stale worktrees are not desired, skip this one
return False
else:
if stale:
# Only stale worktrees are desired, skip this one
return False
return True
def _duplicate_worktree_path(path):
'''
Log errors to the minion log notifying of duplicate worktree paths.
These should not be there, but may show up due to a bug in git 2.7.0.
'''
log.error(
'git.worktree: Duplicate worktree path %s. This may be caused by '
'a known issue in git 2.7.0 (see '
'http://permalink.gmane.org/gmane.comp.version-control.git/283998)',
path
)
tracked_data_points = ('worktree', 'HEAD', 'branch')
ret = {}
git_version = _LooseVersion(version(versioninfo=False))
has_native_list_subcommand = git_version >= _LooseVersion('2.7.0')
if has_native_list_subcommand:
out = _git_run(['git', 'worktree', 'list', '--porcelain'],
cwd=cwd,
user=user,
password=password,
output_encoding=output_encoding)
if out['retcode'] != 0:
msg = 'Failed to list worktrees'
if out['stderr']:
msg += ': {0}'.format(out['stderr'])
raise CommandExecutionError(msg)
def _untracked_item(line):
'''
Log a warning
'''
log.warning('git.worktree: Untracked line item \'%s\'', line)
for individual_worktree in \
salt.utils.itertools.split(out['stdout'].strip(), '\n\n'):
# Initialize the dict where we're storing the tracked data points
worktree_data = dict([(x, '') for x in tracked_data_points])
for line in salt.utils.itertools.split(individual_worktree, '\n'):
try:
type_, value = line.strip().split(None, 1)
except ValueError:
if line == 'detached':
type_ = 'branch'
value = 'detached'
else:
_untracked_item(line)
continue
if type_ not in tracked_data_points:
_untracked_item(line)
continue
if worktree_data[type_]:
log.error(
'git.worktree: Unexpected duplicate %s entry '
'\'%s\', skipping', type_, line
)
continue
worktree_data[type_] = value
# Check for missing data points
missing = [x for x in tracked_data_points if not worktree_data[x]]
if missing:
log.error(
'git.worktree: Incomplete worktree data, missing the '
'following information: %s. Full data below:\n%s',
', '.join(missing), individual_worktree
)
continue
worktree_is_stale = not os.path.isdir(worktree_data['worktree'])
if not _desired(worktree_is_stale, all_, stale):
continue
if worktree_data['worktree'] in ret:
_duplicate_worktree_path(worktree_data['worktree'])
wt_ptr = ret.setdefault(worktree_data['worktree'], {})
wt_ptr['stale'] = worktree_is_stale
wt_ptr['HEAD'] = worktree_data['HEAD']
wt_ptr['detached'] = worktree_data['branch'] == 'detached'
if wt_ptr['detached']:
wt_ptr['branch'] = None
# Check to see if HEAD points at a tag
tags_found = _git_tag_points_at(cwd,
wt_ptr['HEAD'],
user=user,
password=password,
output_encoding=output_encoding)
if tags_found:
wt_ptr['tags'] = tags_found
else:
wt_ptr['branch'] = \
worktree_data['branch'].replace('refs/heads/', '', 1)
return ret
else:
toplevel = _get_toplevel(cwd, user=user, password=password,
output_encoding=output_encoding)
try:
worktree_root = rev_parse(cwd,
opts=['--git-path', 'worktrees'],
user=user,
password=password,
output_encoding=output_encoding)
except CommandExecutionError as exc:
msg = 'Failed to find worktree location for ' + cwd
log.error(msg, exc_info_on_loglevel=logging.DEBUG)
raise CommandExecutionError(msg)
if worktree_root.startswith('.git'):
worktree_root = os.path.join(cwd, worktree_root)
if not os.path.isdir(worktree_root):
raise CommandExecutionError(
'Worktree admin directory {0} not present'.format(worktree_root)
)
def _read_file(path):
'''
Return contents of a single line file with EOF newline stripped
'''
try:
with salt.utils.files.fopen(path, 'r') as fp_:
for line in fp_:
ret = salt.utils.stringutils.to_unicode(line).strip()
# Ignore other lines, if they exist (which they
# shouldn't)
break
return ret
except (IOError, OSError) as exc:
# Raise a CommandExecutionError
salt.utils.files.process_read_exception(exc, path)
for worktree_name in os.listdir(worktree_root):
admin_dir = os.path.join(worktree_root, worktree_name)
gitdir_file = os.path.join(admin_dir, 'gitdir')
head_file = os.path.join(admin_dir, 'HEAD')
wt_loc = _read_file(gitdir_file)
head_ref = _read_file(head_file)
if not os.path.isabs(wt_loc):
log.error(
'Non-absolute path found in %s. If git 2.7.0 was '
'installed and then downgraded, this was likely caused '
'by a known issue in git 2.7.0. See '
'http://permalink.gmane.org/gmane.comp.version-control'
'.git/283998 for more information.', gitdir_file
)
# Emulate what 'git worktree list' does under-the-hood, and
# that is using the toplevel directory. It will still give
# inaccurate results, but will avoid a traceback.
wt_loc = toplevel
if wt_loc.endswith('/.git'):
wt_loc = wt_loc[:-5]
worktree_is_stale = not os.path.isdir(wt_loc)
if not _desired(worktree_is_stale, all_, stale):
continue
if wt_loc in ret:
_duplicate_worktree_path(wt_loc)
if head_ref.startswith('ref: '):
head_ref = head_ref.split(None, 1)[-1]
wt_branch = head_ref.replace('refs/heads/', '', 1)
wt_head = rev_parse(cwd,
rev=head_ref,
user=user,
password=password,
output_encoding=output_encoding)
wt_detached = False
else:
wt_branch = None
wt_head = head_ref
wt_detached = True
wt_ptr = ret.setdefault(wt_loc, {})
wt_ptr['stale'] = worktree_is_stale
wt_ptr['branch'] = wt_branch
wt_ptr['HEAD'] = wt_head
wt_ptr['detached'] = wt_detached
# Check to see if HEAD points at a tag
if wt_detached:
tags_found = _git_tag_points_at(cwd,
wt_head,
user=user,
password=password,
output_encoding=output_encoding)
if tags_found:
wt_ptr['tags'] = tags_found
return ret | def function[list_worktrees, parameter[cwd, stale, user, password, output_encoding]]:
constant[
.. versionadded:: 2015.8.0
Returns information on worktrees
.. versionchanged:: 2015.8.4
Version 2.7.0 added the ``list`` subcommand to `git-worktree(1)`_ which
provides a lot of additional information. The return data has been
changed to include this information, even for pre-2.7.0 versions of
git. In addition, if a worktree has a detached head, then any tags
which point to the worktree's HEAD will be included in the return data.
.. note::
By default, only worktrees for which the worktree directory is still
present are returned, but this can be changed using the ``all`` and
``stale`` arguments (described below).
cwd
The path to the git checkout
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
password
Windows only. Required when specifying ``user``. This parameter will be
ignored on non-Windows platforms.
.. versionadded:: 2016.3.4
all : False
If ``True``, then return all worktrees tracked under
$GIT_DIR/worktrees, including ones for which the gitdir is no longer
present.
stale : False
If ``True``, return *only* worktrees whose gitdir is no longer present.
.. note::
Only one of ``all`` and ``stale`` can be set to ``True``.
output_encoding
Use this option to specify which encoding to use to decode the output
from any git commands which are run. This should not be needed in most
cases.
.. note::
This should only be needed if the files in the repository were
created with filenames using an encoding other than UTF-8 to handle
Unicode characters.
.. versionadded:: 2018.3.1
.. _`git-worktree(1)`: http://git-scm.com/docs/git-worktree
CLI Examples:
.. code-block:: bash
salt myminion git.list_worktrees /path/to/repo
salt myminion git.list_worktrees /path/to/repo all=True
salt myminion git.list_worktrees /path/to/repo stale=True
]
if <ast.UnaryOp object at 0x7da1b21350f0> begin[:]
return[dictionary[[], []]]
variable[cwd] assign[=] call[name[_expand_path], parameter[name[cwd], name[user]]]
variable[kwargs] assign[=] call[name[salt].utils.args.clean_kwargs, parameter[]]
variable[all_] assign[=] call[name[kwargs].pop, parameter[constant[all], constant[False]]]
if name[kwargs] begin[:]
call[name[salt].utils.args.invalid_kwargs, parameter[name[kwargs]]]
if <ast.BoolOp object at 0x7da1b21345b0> begin[:]
<ast.Raise object at 0x7da1b2135690>
def function[_git_tag_points_at, parameter[cwd, rev, user, password, output_encoding]]:
constant[
Get any tags that point at a
]
return[call[call[call[name[_git_run], parameter[list[[<ast.Constant object at 0x7da1b2135f60>, <ast.Constant object at 0x7da1b21372e0>, <ast.Constant object at 0x7da1b2137250>, <ast.Name object at 0x7da1b2137bb0>]]]]][constant[stdout]].splitlines, parameter[]]]
def function[_desired, parameter[is_stale, all_, stale]]:
constant[
Common logic to determine whether or not to include the worktree info
in the return data.
]
if name[is_stale] begin[:]
if <ast.BoolOp object at 0x7da1b2136140> begin[:]
return[constant[False]]
return[constant[True]]
def function[_duplicate_worktree_path, parameter[path]]:
constant[
Log errors to the minion log notifying of duplicate worktree paths.
These should not be there, but may show up due to a bug in git 2.7.0.
]
call[name[log].error, parameter[constant[git.worktree: Duplicate worktree path %s. This may be caused by a known issue in git 2.7.0 (see http://permalink.gmane.org/gmane.comp.version-control.git/283998)], name[path]]]
variable[tracked_data_points] assign[=] tuple[[<ast.Constant object at 0x7da1b2137df0>, <ast.Constant object at 0x7da1b2137610>, <ast.Constant object at 0x7da1b2136380>]]
variable[ret] assign[=] dictionary[[], []]
variable[git_version] assign[=] call[name[_LooseVersion], parameter[call[name[version], parameter[]]]]
variable[has_native_list_subcommand] assign[=] compare[name[git_version] greater_or_equal[>=] call[name[_LooseVersion], parameter[constant[2.7.0]]]]
if name[has_native_list_subcommand] begin[:]
variable[out] assign[=] call[name[_git_run], parameter[list[[<ast.Constant object at 0x7da1b2134b50>, <ast.Constant object at 0x7da1b2134ee0>, <ast.Constant object at 0x7da1b2135000>, <ast.Constant object at 0x7da1b2137be0>]]]]
if compare[call[name[out]][constant[retcode]] not_equal[!=] constant[0]] begin[:]
variable[msg] assign[=] constant[Failed to list worktrees]
if call[name[out]][constant[stderr]] begin[:]
<ast.AugAssign object at 0x7da1b2134e50>
<ast.Raise object at 0x7da1b2135ae0>
def function[_untracked_item, parameter[line]]:
constant[
Log a warning
]
call[name[log].warning, parameter[constant[git.worktree: Untracked line item '%s'], name[line]]]
for taget[name[individual_worktree]] in starred[call[name[salt].utils.itertools.split, parameter[call[call[name[out]][constant[stdout]].strip, parameter[]], constant[
]]]] begin[:]
variable[worktree_data] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da1b2137af0>]]
for taget[name[line]] in starred[call[name[salt].utils.itertools.split, parameter[name[individual_worktree], constant[
]]]] begin[:]
<ast.Try object at 0x7da1b21363b0>
if compare[name[type_] <ast.NotIn object at 0x7da2590d7190> name[tracked_data_points]] begin[:]
call[name[_untracked_item], parameter[name[line]]]
continue
if call[name[worktree_data]][name[type_]] begin[:]
call[name[log].error, parameter[constant[git.worktree: Unexpected duplicate %s entry '%s', skipping], name[type_], name[line]]]
continue
call[name[worktree_data]][name[type_]] assign[=] name[value]
variable[missing] assign[=] <ast.ListComp object at 0x7da20c6a85b0>
if name[missing] begin[:]
call[name[log].error, parameter[constant[git.worktree: Incomplete worktree data, missing the following information: %s. Full data below:
%s], call[constant[, ].join, parameter[name[missing]]], name[individual_worktree]]]
continue
variable[worktree_is_stale] assign[=] <ast.UnaryOp object at 0x7da20c6a9ba0>
if <ast.UnaryOp object at 0x7da20c6a89d0> begin[:]
continue
if compare[call[name[worktree_data]][constant[worktree]] in name[ret]] begin[:]
call[name[_duplicate_worktree_path], parameter[call[name[worktree_data]][constant[worktree]]]]
variable[wt_ptr] assign[=] call[name[ret].setdefault, parameter[call[name[worktree_data]][constant[worktree]], dictionary[[], []]]]
call[name[wt_ptr]][constant[stale]] assign[=] name[worktree_is_stale]
call[name[wt_ptr]][constant[HEAD]] assign[=] call[name[worktree_data]][constant[HEAD]]
call[name[wt_ptr]][constant[detached]] assign[=] compare[call[name[worktree_data]][constant[branch]] equal[==] constant[detached]]
if call[name[wt_ptr]][constant[detached]] begin[:]
call[name[wt_ptr]][constant[branch]] assign[=] constant[None]
variable[tags_found] assign[=] call[name[_git_tag_points_at], parameter[name[cwd], call[name[wt_ptr]][constant[HEAD]]]]
if name[tags_found] begin[:]
call[name[wt_ptr]][constant[tags]] assign[=] name[tags_found]
return[name[ret]]
return[name[ret]] | keyword[def] identifier[list_worktrees] ( identifier[cwd] ,
identifier[stale] = keyword[False] ,
identifier[user] = keyword[None] ,
identifier[password] = keyword[None] ,
identifier[output_encoding] = keyword[None] ,
** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[_check_worktree_support] ( identifier[failhard] = keyword[True] ):
keyword[return] {}
identifier[cwd] = identifier[_expand_path] ( identifier[cwd] , identifier[user] )
identifier[kwargs] = identifier[salt] . identifier[utils] . identifier[args] . identifier[clean_kwargs] (** identifier[kwargs] )
identifier[all_] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] )
keyword[if] identifier[kwargs] :
identifier[salt] . identifier[utils] . identifier[args] . identifier[invalid_kwargs] ( identifier[kwargs] )
keyword[if] identifier[all_] keyword[and] identifier[stale] :
keyword[raise] identifier[CommandExecutionError] (
literal[string]
)
keyword[def] identifier[_git_tag_points_at] ( identifier[cwd] , identifier[rev] , identifier[user] = keyword[None] , identifier[password] = keyword[None] ,
identifier[output_encoding] = keyword[None] ):
literal[string]
keyword[return] identifier[_git_run] ([ literal[string] , literal[string] , literal[string] , identifier[rev] ],
identifier[cwd] = identifier[cwd] ,
identifier[user] = identifier[user] ,
identifier[password] = identifier[password] ,
identifier[output_encoding] = identifier[output_encoding] )[ literal[string] ]. identifier[splitlines] ()
keyword[def] identifier[_desired] ( identifier[is_stale] , identifier[all_] , identifier[stale] ):
literal[string]
keyword[if] identifier[is_stale] :
keyword[if] keyword[not] identifier[all_] keyword[and] keyword[not] identifier[stale] :
keyword[return] keyword[False]
keyword[else] :
keyword[if] identifier[stale] :
keyword[return] keyword[False]
keyword[return] keyword[True]
keyword[def] identifier[_duplicate_worktree_path] ( identifier[path] ):
literal[string]
identifier[log] . identifier[error] (
literal[string]
literal[string]
literal[string] ,
identifier[path]
)
identifier[tracked_data_points] =( literal[string] , literal[string] , literal[string] )
identifier[ret] ={}
identifier[git_version] = identifier[_LooseVersion] ( identifier[version] ( identifier[versioninfo] = keyword[False] ))
identifier[has_native_list_subcommand] = identifier[git_version] >= identifier[_LooseVersion] ( literal[string] )
keyword[if] identifier[has_native_list_subcommand] :
identifier[out] = identifier[_git_run] ([ literal[string] , literal[string] , literal[string] , literal[string] ],
identifier[cwd] = identifier[cwd] ,
identifier[user] = identifier[user] ,
identifier[password] = identifier[password] ,
identifier[output_encoding] = identifier[output_encoding] )
keyword[if] identifier[out] [ literal[string] ]!= literal[int] :
identifier[msg] = literal[string]
keyword[if] identifier[out] [ literal[string] ]:
identifier[msg] += literal[string] . identifier[format] ( identifier[out] [ literal[string] ])
keyword[raise] identifier[CommandExecutionError] ( identifier[msg] )
keyword[def] identifier[_untracked_item] ( identifier[line] ):
literal[string]
identifier[log] . identifier[warning] ( literal[string] , identifier[line] )
keyword[for] identifier[individual_worktree] keyword[in] identifier[salt] . identifier[utils] . identifier[itertools] . identifier[split] ( identifier[out] [ literal[string] ]. identifier[strip] (), literal[string] ):
identifier[worktree_data] = identifier[dict] ([( identifier[x] , literal[string] ) keyword[for] identifier[x] keyword[in] identifier[tracked_data_points] ])
keyword[for] identifier[line] keyword[in] identifier[salt] . identifier[utils] . identifier[itertools] . identifier[split] ( identifier[individual_worktree] , literal[string] ):
keyword[try] :
identifier[type_] , identifier[value] = identifier[line] . identifier[strip] (). identifier[split] ( keyword[None] , literal[int] )
keyword[except] identifier[ValueError] :
keyword[if] identifier[line] == literal[string] :
identifier[type_] = literal[string]
identifier[value] = literal[string]
keyword[else] :
identifier[_untracked_item] ( identifier[line] )
keyword[continue]
keyword[if] identifier[type_] keyword[not] keyword[in] identifier[tracked_data_points] :
identifier[_untracked_item] ( identifier[line] )
keyword[continue]
keyword[if] identifier[worktree_data] [ identifier[type_] ]:
identifier[log] . identifier[error] (
literal[string]
literal[string] , identifier[type_] , identifier[line]
)
keyword[continue]
identifier[worktree_data] [ identifier[type_] ]= identifier[value]
identifier[missing] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[tracked_data_points] keyword[if] keyword[not] identifier[worktree_data] [ identifier[x] ]]
keyword[if] identifier[missing] :
identifier[log] . identifier[error] (
literal[string]
literal[string] ,
literal[string] . identifier[join] ( identifier[missing] ), identifier[individual_worktree]
)
keyword[continue]
identifier[worktree_is_stale] = keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[worktree_data] [ literal[string] ])
keyword[if] keyword[not] identifier[_desired] ( identifier[worktree_is_stale] , identifier[all_] , identifier[stale] ):
keyword[continue]
keyword[if] identifier[worktree_data] [ literal[string] ] keyword[in] identifier[ret] :
identifier[_duplicate_worktree_path] ( identifier[worktree_data] [ literal[string] ])
identifier[wt_ptr] = identifier[ret] . identifier[setdefault] ( identifier[worktree_data] [ literal[string] ],{})
identifier[wt_ptr] [ literal[string] ]= identifier[worktree_is_stale]
identifier[wt_ptr] [ literal[string] ]= identifier[worktree_data] [ literal[string] ]
identifier[wt_ptr] [ literal[string] ]= identifier[worktree_data] [ literal[string] ]== literal[string]
keyword[if] identifier[wt_ptr] [ literal[string] ]:
identifier[wt_ptr] [ literal[string] ]= keyword[None]
identifier[tags_found] = identifier[_git_tag_points_at] ( identifier[cwd] ,
identifier[wt_ptr] [ literal[string] ],
identifier[user] = identifier[user] ,
identifier[password] = identifier[password] ,
identifier[output_encoding] = identifier[output_encoding] )
keyword[if] identifier[tags_found] :
identifier[wt_ptr] [ literal[string] ]= identifier[tags_found]
keyword[else] :
identifier[wt_ptr] [ literal[string] ]= identifier[worktree_data] [ literal[string] ]. identifier[replace] ( literal[string] , literal[string] , literal[int] )
keyword[return] identifier[ret]
keyword[else] :
identifier[toplevel] = identifier[_get_toplevel] ( identifier[cwd] , identifier[user] = identifier[user] , identifier[password] = identifier[password] ,
identifier[output_encoding] = identifier[output_encoding] )
keyword[try] :
identifier[worktree_root] = identifier[rev_parse] ( identifier[cwd] ,
identifier[opts] =[ literal[string] , literal[string] ],
identifier[user] = identifier[user] ,
identifier[password] = identifier[password] ,
identifier[output_encoding] = identifier[output_encoding] )
keyword[except] identifier[CommandExecutionError] keyword[as] identifier[exc] :
identifier[msg] = literal[string] + identifier[cwd]
identifier[log] . identifier[error] ( identifier[msg] , identifier[exc_info_on_loglevel] = identifier[logging] . identifier[DEBUG] )
keyword[raise] identifier[CommandExecutionError] ( identifier[msg] )
keyword[if] identifier[worktree_root] . identifier[startswith] ( literal[string] ):
identifier[worktree_root] = identifier[os] . identifier[path] . identifier[join] ( identifier[cwd] , identifier[worktree_root] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[worktree_root] ):
keyword[raise] identifier[CommandExecutionError] (
literal[string] . identifier[format] ( identifier[worktree_root] )
)
keyword[def] identifier[_read_file] ( identifier[path] ):
literal[string]
keyword[try] :
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[path] , literal[string] ) keyword[as] identifier[fp_] :
keyword[for] identifier[line] keyword[in] identifier[fp_] :
identifier[ret] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] ( identifier[line] ). identifier[strip] ()
keyword[break]
keyword[return] identifier[ret]
keyword[except] ( identifier[IOError] , identifier[OSError] ) keyword[as] identifier[exc] :
identifier[salt] . identifier[utils] . identifier[files] . identifier[process_read_exception] ( identifier[exc] , identifier[path] )
keyword[for] identifier[worktree_name] keyword[in] identifier[os] . identifier[listdir] ( identifier[worktree_root] ):
identifier[admin_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[worktree_root] , identifier[worktree_name] )
identifier[gitdir_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[admin_dir] , literal[string] )
identifier[head_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[admin_dir] , literal[string] )
identifier[wt_loc] = identifier[_read_file] ( identifier[gitdir_file] )
identifier[head_ref] = identifier[_read_file] ( identifier[head_file] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isabs] ( identifier[wt_loc] ):
identifier[log] . identifier[error] (
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] , identifier[gitdir_file]
)
identifier[wt_loc] = identifier[toplevel]
keyword[if] identifier[wt_loc] . identifier[endswith] ( literal[string] ):
identifier[wt_loc] = identifier[wt_loc] [:- literal[int] ]
identifier[worktree_is_stale] = keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[wt_loc] )
keyword[if] keyword[not] identifier[_desired] ( identifier[worktree_is_stale] , identifier[all_] , identifier[stale] ):
keyword[continue]
keyword[if] identifier[wt_loc] keyword[in] identifier[ret] :
identifier[_duplicate_worktree_path] ( identifier[wt_loc] )
keyword[if] identifier[head_ref] . identifier[startswith] ( literal[string] ):
identifier[head_ref] = identifier[head_ref] . identifier[split] ( keyword[None] , literal[int] )[- literal[int] ]
identifier[wt_branch] = identifier[head_ref] . identifier[replace] ( literal[string] , literal[string] , literal[int] )
identifier[wt_head] = identifier[rev_parse] ( identifier[cwd] ,
identifier[rev] = identifier[head_ref] ,
identifier[user] = identifier[user] ,
identifier[password] = identifier[password] ,
identifier[output_encoding] = identifier[output_encoding] )
identifier[wt_detached] = keyword[False]
keyword[else] :
identifier[wt_branch] = keyword[None]
identifier[wt_head] = identifier[head_ref]
identifier[wt_detached] = keyword[True]
identifier[wt_ptr] = identifier[ret] . identifier[setdefault] ( identifier[wt_loc] ,{})
identifier[wt_ptr] [ literal[string] ]= identifier[worktree_is_stale]
identifier[wt_ptr] [ literal[string] ]= identifier[wt_branch]
identifier[wt_ptr] [ literal[string] ]= identifier[wt_head]
identifier[wt_ptr] [ literal[string] ]= identifier[wt_detached]
keyword[if] identifier[wt_detached] :
identifier[tags_found] = identifier[_git_tag_points_at] ( identifier[cwd] ,
identifier[wt_head] ,
identifier[user] = identifier[user] ,
identifier[password] = identifier[password] ,
identifier[output_encoding] = identifier[output_encoding] )
keyword[if] identifier[tags_found] :
identifier[wt_ptr] [ literal[string] ]= identifier[tags_found]
keyword[return] identifier[ret] | def list_worktrees(cwd, stale=False, user=None, password=None, output_encoding=None, **kwargs):
"""
.. versionadded:: 2015.8.0
Returns information on worktrees
.. versionchanged:: 2015.8.4
Version 2.7.0 added the ``list`` subcommand to `git-worktree(1)`_ which
provides a lot of additional information. The return data has been
changed to include this information, even for pre-2.7.0 versions of
git. In addition, if a worktree has a detached head, then any tags
which point to the worktree's HEAD will be included in the return data.
.. note::
By default, only worktrees for which the worktree directory is still
present are returned, but this can be changed using the ``all`` and
``stale`` arguments (described below).
cwd
The path to the git checkout
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
password
Windows only. Required when specifying ``user``. This parameter will be
ignored on non-Windows platforms.
.. versionadded:: 2016.3.4
all : False
If ``True``, then return all worktrees tracked under
$GIT_DIR/worktrees, including ones for which the gitdir is no longer
present.
stale : False
If ``True``, return *only* worktrees whose gitdir is no longer present.
.. note::
Only one of ``all`` and ``stale`` can be set to ``True``.
output_encoding
Use this option to specify which encoding to use to decode the output
from any git commands which are run. This should not be needed in most
cases.
.. note::
This should only be needed if the files in the repository were
created with filenames using an encoding other than UTF-8 to handle
Unicode characters.
.. versionadded:: 2018.3.1
.. _`git-worktree(1)`: http://git-scm.com/docs/git-worktree
CLI Examples:
.. code-block:: bash
salt myminion git.list_worktrees /path/to/repo
salt myminion git.list_worktrees /path/to/repo all=True
salt myminion git.list_worktrees /path/to/repo stale=True
"""
if not _check_worktree_support(failhard=True):
return {} # depends on [control=['if'], data=[]]
cwd = _expand_path(cwd, user)
kwargs = salt.utils.args.clean_kwargs(**kwargs)
all_ = kwargs.pop('all', False)
if kwargs:
salt.utils.args.invalid_kwargs(kwargs) # depends on [control=['if'], data=[]]
if all_ and stale:
raise CommandExecutionError("'all' and 'stale' cannot both be set to True") # depends on [control=['if'], data=[]]
def _git_tag_points_at(cwd, rev, user=None, password=None, output_encoding=None):
"""
Get any tags that point at a
"""
return _git_run(['git', 'tag', '--points-at', rev], cwd=cwd, user=user, password=password, output_encoding=output_encoding)['stdout'].splitlines()
def _desired(is_stale, all_, stale):
"""
Common logic to determine whether or not to include the worktree info
in the return data.
"""
if is_stale:
if not all_ and (not stale):
# Stale worktrees are not desired, skip this one
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif stale:
# Only stale worktrees are desired, skip this one
return False # depends on [control=['if'], data=[]]
return True
def _duplicate_worktree_path(path):
"""
Log errors to the minion log notifying of duplicate worktree paths.
These should not be there, but may show up due to a bug in git 2.7.0.
"""
log.error('git.worktree: Duplicate worktree path %s. This may be caused by a known issue in git 2.7.0 (see http://permalink.gmane.org/gmane.comp.version-control.git/283998)', path)
tracked_data_points = ('worktree', 'HEAD', 'branch')
ret = {}
git_version = _LooseVersion(version(versioninfo=False))
has_native_list_subcommand = git_version >= _LooseVersion('2.7.0')
if has_native_list_subcommand:
out = _git_run(['git', 'worktree', 'list', '--porcelain'], cwd=cwd, user=user, password=password, output_encoding=output_encoding)
if out['retcode'] != 0:
msg = 'Failed to list worktrees'
if out['stderr']:
msg += ': {0}'.format(out['stderr']) # depends on [control=['if'], data=[]]
raise CommandExecutionError(msg) # depends on [control=['if'], data=[]]
def _untracked_item(line):
"""
Log a warning
"""
log.warning("git.worktree: Untracked line item '%s'", line)
for individual_worktree in salt.utils.itertools.split(out['stdout'].strip(), '\n\n'):
# Initialize the dict where we're storing the tracked data points
worktree_data = dict([(x, '') for x in tracked_data_points])
for line in salt.utils.itertools.split(individual_worktree, '\n'):
try:
(type_, value) = line.strip().split(None, 1) # depends on [control=['try'], data=[]]
except ValueError:
if line == 'detached':
type_ = 'branch'
value = 'detached' # depends on [control=['if'], data=[]]
else:
_untracked_item(line)
continue # depends on [control=['except'], data=[]]
if type_ not in tracked_data_points:
_untracked_item(line)
continue # depends on [control=['if'], data=[]]
if worktree_data[type_]:
log.error("git.worktree: Unexpected duplicate %s entry '%s', skipping", type_, line)
continue # depends on [control=['if'], data=[]]
worktree_data[type_] = value # depends on [control=['for'], data=['line']]
# Check for missing data points
missing = [x for x in tracked_data_points if not worktree_data[x]]
if missing:
log.error('git.worktree: Incomplete worktree data, missing the following information: %s. Full data below:\n%s', ', '.join(missing), individual_worktree)
continue # depends on [control=['if'], data=[]]
worktree_is_stale = not os.path.isdir(worktree_data['worktree'])
if not _desired(worktree_is_stale, all_, stale):
continue # depends on [control=['if'], data=[]]
if worktree_data['worktree'] in ret:
_duplicate_worktree_path(worktree_data['worktree']) # depends on [control=['if'], data=[]]
wt_ptr = ret.setdefault(worktree_data['worktree'], {})
wt_ptr['stale'] = worktree_is_stale
wt_ptr['HEAD'] = worktree_data['HEAD']
wt_ptr['detached'] = worktree_data['branch'] == 'detached'
if wt_ptr['detached']:
wt_ptr['branch'] = None
# Check to see if HEAD points at a tag
tags_found = _git_tag_points_at(cwd, wt_ptr['HEAD'], user=user, password=password, output_encoding=output_encoding)
if tags_found:
wt_ptr['tags'] = tags_found # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
wt_ptr['branch'] = worktree_data['branch'].replace('refs/heads/', '', 1) # depends on [control=['for'], data=['individual_worktree']]
return ret # depends on [control=['if'], data=[]]
else:
toplevel = _get_toplevel(cwd, user=user, password=password, output_encoding=output_encoding)
try:
worktree_root = rev_parse(cwd, opts=['--git-path', 'worktrees'], user=user, password=password, output_encoding=output_encoding) # depends on [control=['try'], data=[]]
except CommandExecutionError as exc:
msg = 'Failed to find worktree location for ' + cwd
log.error(msg, exc_info_on_loglevel=logging.DEBUG)
raise CommandExecutionError(msg) # depends on [control=['except'], data=[]]
if worktree_root.startswith('.git'):
worktree_root = os.path.join(cwd, worktree_root) # depends on [control=['if'], data=[]]
if not os.path.isdir(worktree_root):
raise CommandExecutionError('Worktree admin directory {0} not present'.format(worktree_root)) # depends on [control=['if'], data=[]]
def _read_file(path):
"""
Return contents of a single line file with EOF newline stripped
"""
try:
with salt.utils.files.fopen(path, 'r') as fp_:
for line in fp_:
ret = salt.utils.stringutils.to_unicode(line).strip()
# Ignore other lines, if they exist (which they
# shouldn't)
break # depends on [control=['for'], data=['line']]
return ret # depends on [control=['with'], data=['fp_']] # depends on [control=['try'], data=[]]
except (IOError, OSError) as exc:
# Raise a CommandExecutionError
salt.utils.files.process_read_exception(exc, path) # depends on [control=['except'], data=['exc']]
for worktree_name in os.listdir(worktree_root):
admin_dir = os.path.join(worktree_root, worktree_name)
gitdir_file = os.path.join(admin_dir, 'gitdir')
head_file = os.path.join(admin_dir, 'HEAD')
wt_loc = _read_file(gitdir_file)
head_ref = _read_file(head_file)
if not os.path.isabs(wt_loc):
log.error('Non-absolute path found in %s. If git 2.7.0 was installed and then downgraded, this was likely caused by a known issue in git 2.7.0. See http://permalink.gmane.org/gmane.comp.version-control.git/283998 for more information.', gitdir_file)
# Emulate what 'git worktree list' does under-the-hood, and
# that is using the toplevel directory. It will still give
# inaccurate results, but will avoid a traceback.
wt_loc = toplevel # depends on [control=['if'], data=[]]
if wt_loc.endswith('/.git'):
wt_loc = wt_loc[:-5] # depends on [control=['if'], data=[]]
worktree_is_stale = not os.path.isdir(wt_loc)
if not _desired(worktree_is_stale, all_, stale):
continue # depends on [control=['if'], data=[]]
if wt_loc in ret:
_duplicate_worktree_path(wt_loc) # depends on [control=['if'], data=['wt_loc']]
if head_ref.startswith('ref: '):
head_ref = head_ref.split(None, 1)[-1]
wt_branch = head_ref.replace('refs/heads/', '', 1)
wt_head = rev_parse(cwd, rev=head_ref, user=user, password=password, output_encoding=output_encoding)
wt_detached = False # depends on [control=['if'], data=[]]
else:
wt_branch = None
wt_head = head_ref
wt_detached = True
wt_ptr = ret.setdefault(wt_loc, {})
wt_ptr['stale'] = worktree_is_stale
wt_ptr['branch'] = wt_branch
wt_ptr['HEAD'] = wt_head
wt_ptr['detached'] = wt_detached
# Check to see if HEAD points at a tag
if wt_detached:
tags_found = _git_tag_points_at(cwd, wt_head, user=user, password=password, output_encoding=output_encoding)
if tags_found:
wt_ptr['tags'] = tags_found # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['worktree_name']]
return ret |
def verify(self):
"""Checks all parameters for invalidating conditions
:returns: str -- message if error, 0 otherwise
"""
for row in range(self.nrows()):
result = self.verify_row(row)
if result != 0:
return result
return 0 | def function[verify, parameter[self]]:
constant[Checks all parameters for invalidating conditions
:returns: str -- message if error, 0 otherwise
]
for taget[name[row]] in starred[call[name[range], parameter[call[name[self].nrows, parameter[]]]]] begin[:]
variable[result] assign[=] call[name[self].verify_row, parameter[name[row]]]
if compare[name[result] not_equal[!=] constant[0]] begin[:]
return[name[result]]
return[constant[0]] | keyword[def] identifier[verify] ( identifier[self] ):
literal[string]
keyword[for] identifier[row] keyword[in] identifier[range] ( identifier[self] . identifier[nrows] ()):
identifier[result] = identifier[self] . identifier[verify_row] ( identifier[row] )
keyword[if] identifier[result] != literal[int] :
keyword[return] identifier[result]
keyword[return] literal[int] | def verify(self):
"""Checks all parameters for invalidating conditions
:returns: str -- message if error, 0 otherwise
"""
for row in range(self.nrows()):
result = self.verify_row(row)
if result != 0:
return result # depends on [control=['if'], data=['result']] # depends on [control=['for'], data=['row']]
return 0 |
def vesting(ctx, account):
""" List accounts vesting balances
"""
account = Account(account, full=True)
t = [["vesting_id", "claimable"]]
for vest in account["vesting_balances"]:
vesting = Vesting(vest)
t.append([vesting["id"], str(vesting.claimable)])
print_table(t) | def function[vesting, parameter[ctx, account]]:
constant[ List accounts vesting balances
]
variable[account] assign[=] call[name[Account], parameter[name[account]]]
variable[t] assign[=] list[[<ast.List object at 0x7da20c76f6a0>]]
for taget[name[vest]] in starred[call[name[account]][constant[vesting_balances]]] begin[:]
variable[vesting] assign[=] call[name[Vesting], parameter[name[vest]]]
call[name[t].append, parameter[list[[<ast.Subscript object at 0x7da20c76cd60>, <ast.Call object at 0x7da20c76eb90>]]]]
call[name[print_table], parameter[name[t]]] | keyword[def] identifier[vesting] ( identifier[ctx] , identifier[account] ):
literal[string]
identifier[account] = identifier[Account] ( identifier[account] , identifier[full] = keyword[True] )
identifier[t] =[[ literal[string] , literal[string] ]]
keyword[for] identifier[vest] keyword[in] identifier[account] [ literal[string] ]:
identifier[vesting] = identifier[Vesting] ( identifier[vest] )
identifier[t] . identifier[append] ([ identifier[vesting] [ literal[string] ], identifier[str] ( identifier[vesting] . identifier[claimable] )])
identifier[print_table] ( identifier[t] ) | def vesting(ctx, account):
""" List accounts vesting balances
"""
account = Account(account, full=True)
t = [['vesting_id', 'claimable']]
for vest in account['vesting_balances']:
vesting = Vesting(vest)
t.append([vesting['id'], str(vesting.claimable)]) # depends on [control=['for'], data=['vest']]
print_table(t) |
def convert_number(string):
"""Convert a string to number
If int convert to int otherwise float
If not possible return None
"""
res = None
if isint(string):
res = int(string)
elif isfloat(string):
res = float(string)
return res | def function[convert_number, parameter[string]]:
constant[Convert a string to number
If int convert to int otherwise float
If not possible return None
]
variable[res] assign[=] constant[None]
if call[name[isint], parameter[name[string]]] begin[:]
variable[res] assign[=] call[name[int], parameter[name[string]]]
return[name[res]] | keyword[def] identifier[convert_number] ( identifier[string] ):
literal[string]
identifier[res] = keyword[None]
keyword[if] identifier[isint] ( identifier[string] ):
identifier[res] = identifier[int] ( identifier[string] )
keyword[elif] identifier[isfloat] ( identifier[string] ):
identifier[res] = identifier[float] ( identifier[string] )
keyword[return] identifier[res] | def convert_number(string):
"""Convert a string to number
If int convert to int otherwise float
If not possible return None
"""
res = None
if isint(string):
res = int(string) # depends on [control=['if'], data=[]]
elif isfloat(string):
res = float(string) # depends on [control=['if'], data=[]]
return res |
def _full_sub_array(data_obj, xj_path, create_dict_path):
"""Retrieves all array or dictionary elements for '*' JSON path marker.
:param dict|list data_obj: The current data object.
:param str xj_path: A json path.
:param bool create_dict_path create a dict path.
:return: tuple with two values: first is a result and second
a boolean flag telling if this value exists or not.
"""
if isinstance(data_obj, list):
if xj_path:
res = []
for d in data_obj:
val, exists = path_lookup(d, xj_path, create_dict_path)
if exists:
res.append(val)
return tuple(res), True
else:
return tuple(data_obj), True
elif isinstance(data_obj, dict):
if xj_path:
res = []
for d in data_obj.values():
val, exists = path_lookup(d, xj_path, create_dict_path)
if exists:
res.append(val)
return tuple(res), True
else:
return tuple(data_obj.values()), True
else:
return None, False | def function[_full_sub_array, parameter[data_obj, xj_path, create_dict_path]]:
constant[Retrieves all array or dictionary elements for '*' JSON path marker.
:param dict|list data_obj: The current data object.
:param str xj_path: A json path.
:param bool create_dict_path create a dict path.
:return: tuple with two values: first is a result and second
a boolean flag telling if this value exists or not.
]
if call[name[isinstance], parameter[name[data_obj], name[list]]] begin[:]
if name[xj_path] begin[:]
variable[res] assign[=] list[[]]
for taget[name[d]] in starred[name[data_obj]] begin[:]
<ast.Tuple object at 0x7da1b0ae3c10> assign[=] call[name[path_lookup], parameter[name[d], name[xj_path], name[create_dict_path]]]
if name[exists] begin[:]
call[name[res].append, parameter[name[val]]]
return[tuple[[<ast.Call object at 0x7da1b0ae37c0>, <ast.Constant object at 0x7da1b0ae0220>]]] | keyword[def] identifier[_full_sub_array] ( identifier[data_obj] , identifier[xj_path] , identifier[create_dict_path] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[data_obj] , identifier[list] ):
keyword[if] identifier[xj_path] :
identifier[res] =[]
keyword[for] identifier[d] keyword[in] identifier[data_obj] :
identifier[val] , identifier[exists] = identifier[path_lookup] ( identifier[d] , identifier[xj_path] , identifier[create_dict_path] )
keyword[if] identifier[exists] :
identifier[res] . identifier[append] ( identifier[val] )
keyword[return] identifier[tuple] ( identifier[res] ), keyword[True]
keyword[else] :
keyword[return] identifier[tuple] ( identifier[data_obj] ), keyword[True]
keyword[elif] identifier[isinstance] ( identifier[data_obj] , identifier[dict] ):
keyword[if] identifier[xj_path] :
identifier[res] =[]
keyword[for] identifier[d] keyword[in] identifier[data_obj] . identifier[values] ():
identifier[val] , identifier[exists] = identifier[path_lookup] ( identifier[d] , identifier[xj_path] , identifier[create_dict_path] )
keyword[if] identifier[exists] :
identifier[res] . identifier[append] ( identifier[val] )
keyword[return] identifier[tuple] ( identifier[res] ), keyword[True]
keyword[else] :
keyword[return] identifier[tuple] ( identifier[data_obj] . identifier[values] ()), keyword[True]
keyword[else] :
keyword[return] keyword[None] , keyword[False] | def _full_sub_array(data_obj, xj_path, create_dict_path):
"""Retrieves all array or dictionary elements for '*' JSON path marker.
:param dict|list data_obj: The current data object.
:param str xj_path: A json path.
:param bool create_dict_path create a dict path.
:return: tuple with two values: first is a result and second
a boolean flag telling if this value exists or not.
"""
if isinstance(data_obj, list):
if xj_path:
res = []
for d in data_obj:
(val, exists) = path_lookup(d, xj_path, create_dict_path)
if exists:
res.append(val) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']]
return (tuple(res), True) # depends on [control=['if'], data=[]]
else:
return (tuple(data_obj), True) # depends on [control=['if'], data=[]]
elif isinstance(data_obj, dict):
if xj_path:
res = []
for d in data_obj.values():
(val, exists) = path_lookup(d, xj_path, create_dict_path)
if exists:
res.append(val) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']]
return (tuple(res), True) # depends on [control=['if'], data=[]]
else:
return (tuple(data_obj.values()), True) # depends on [control=['if'], data=[]]
else:
return (None, False) |
def mousePressEvent(self, event):
"""Override Qt method
Select line, and starts selection
"""
line_number = self.editor.get_linenumber_from_mouse_event(event)
self._pressed = line_number
self._released = line_number
self.editor.select_lines(self._pressed,
self._released) | def function[mousePressEvent, parameter[self, event]]:
constant[Override Qt method
Select line, and starts selection
]
variable[line_number] assign[=] call[name[self].editor.get_linenumber_from_mouse_event, parameter[name[event]]]
name[self]._pressed assign[=] name[line_number]
name[self]._released assign[=] name[line_number]
call[name[self].editor.select_lines, parameter[name[self]._pressed, name[self]._released]] | keyword[def] identifier[mousePressEvent] ( identifier[self] , identifier[event] ):
literal[string]
identifier[line_number] = identifier[self] . identifier[editor] . identifier[get_linenumber_from_mouse_event] ( identifier[event] )
identifier[self] . identifier[_pressed] = identifier[line_number]
identifier[self] . identifier[_released] = identifier[line_number]
identifier[self] . identifier[editor] . identifier[select_lines] ( identifier[self] . identifier[_pressed] ,
identifier[self] . identifier[_released] ) | def mousePressEvent(self, event):
"""Override Qt method
Select line, and starts selection
"""
line_number = self.editor.get_linenumber_from_mouse_event(event)
self._pressed = line_number
self._released = line_number
self.editor.select_lines(self._pressed, self._released) |
def _make_tempy_tag(self, tag, attrs, void):
"""Searches in tempy.tags for the correct tag to use, if does not exists uses the TempyFactory to
create a custom tag."""
tempy_tag_cls = getattr(self.tempy_tags, tag.title(), None)
if not tempy_tag_cls:
unknow_maker = [self.unknown_tag_maker, self.unknown_tag_maker.Void][void]
tempy_tag_cls = unknow_maker[tag]
attrs = {Tag._TO_SPECIALS.get(k, k): v or True for k, v in attrs}
tempy_tag = tempy_tag_cls(**attrs)
if not self.current_tag:
self.result.append(tempy_tag)
if not void:
self.current_tag = tempy_tag
else:
if not tempy_tag._void:
self.current_tag(tempy_tag)
self.current_tag = self.current_tag.childs[-1] | def function[_make_tempy_tag, parameter[self, tag, attrs, void]]:
constant[Searches in tempy.tags for the correct tag to use, if does not exists uses the TempyFactory to
create a custom tag.]
variable[tempy_tag_cls] assign[=] call[name[getattr], parameter[name[self].tempy_tags, call[name[tag].title, parameter[]], constant[None]]]
if <ast.UnaryOp object at 0x7da1b0ed48e0> begin[:]
variable[unknow_maker] assign[=] call[list[[<ast.Attribute object at 0x7da1b0ed7a90>, <ast.Attribute object at 0x7da1b0ed4760>]]][name[void]]
variable[tempy_tag_cls] assign[=] call[name[unknow_maker]][name[tag]]
variable[attrs] assign[=] <ast.DictComp object at 0x7da1b0ed4880>
variable[tempy_tag] assign[=] call[name[tempy_tag_cls], parameter[]]
if <ast.UnaryOp object at 0x7da1b0e26110> begin[:]
call[name[self].result.append, parameter[name[tempy_tag]]]
if <ast.UnaryOp object at 0x7da1b0e26470> begin[:]
name[self].current_tag assign[=] name[tempy_tag] | keyword[def] identifier[_make_tempy_tag] ( identifier[self] , identifier[tag] , identifier[attrs] , identifier[void] ):
literal[string]
identifier[tempy_tag_cls] = identifier[getattr] ( identifier[self] . identifier[tempy_tags] , identifier[tag] . identifier[title] (), keyword[None] )
keyword[if] keyword[not] identifier[tempy_tag_cls] :
identifier[unknow_maker] =[ identifier[self] . identifier[unknown_tag_maker] , identifier[self] . identifier[unknown_tag_maker] . identifier[Void] ][ identifier[void] ]
identifier[tempy_tag_cls] = identifier[unknow_maker] [ identifier[tag] ]
identifier[attrs] ={ identifier[Tag] . identifier[_TO_SPECIALS] . identifier[get] ( identifier[k] , identifier[k] ): identifier[v] keyword[or] keyword[True] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[attrs] }
identifier[tempy_tag] = identifier[tempy_tag_cls] (** identifier[attrs] )
keyword[if] keyword[not] identifier[self] . identifier[current_tag] :
identifier[self] . identifier[result] . identifier[append] ( identifier[tempy_tag] )
keyword[if] keyword[not] identifier[void] :
identifier[self] . identifier[current_tag] = identifier[tempy_tag]
keyword[else] :
keyword[if] keyword[not] identifier[tempy_tag] . identifier[_void] :
identifier[self] . identifier[current_tag] ( identifier[tempy_tag] )
identifier[self] . identifier[current_tag] = identifier[self] . identifier[current_tag] . identifier[childs] [- literal[int] ] | def _make_tempy_tag(self, tag, attrs, void):
"""Searches in tempy.tags for the correct tag to use, if does not exists uses the TempyFactory to
create a custom tag."""
tempy_tag_cls = getattr(self.tempy_tags, tag.title(), None)
if not tempy_tag_cls:
unknow_maker = [self.unknown_tag_maker, self.unknown_tag_maker.Void][void]
tempy_tag_cls = unknow_maker[tag] # depends on [control=['if'], data=[]]
attrs = {Tag._TO_SPECIALS.get(k, k): v or True for (k, v) in attrs}
tempy_tag = tempy_tag_cls(**attrs)
if not self.current_tag:
self.result.append(tempy_tag)
if not void:
self.current_tag = tempy_tag # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif not tempy_tag._void:
self.current_tag(tempy_tag)
self.current_tag = self.current_tag.childs[-1] # depends on [control=['if'], data=[]] |
def _is_valid_endpoint(endpoint):
"""helper for interval_range to check if start/end are valid types"""
return any([is_number(endpoint),
isinstance(endpoint, Timestamp),
isinstance(endpoint, Timedelta),
endpoint is None]) | def function[_is_valid_endpoint, parameter[endpoint]]:
constant[helper for interval_range to check if start/end are valid types]
return[call[name[any], parameter[list[[<ast.Call object at 0x7da18ede6c80>, <ast.Call object at 0x7da18ede7760>, <ast.Call object at 0x7da18ede5630>, <ast.Compare object at 0x7da18ede4fa0>]]]]] | keyword[def] identifier[_is_valid_endpoint] ( identifier[endpoint] ):
literal[string]
keyword[return] identifier[any] ([ identifier[is_number] ( identifier[endpoint] ),
identifier[isinstance] ( identifier[endpoint] , identifier[Timestamp] ),
identifier[isinstance] ( identifier[endpoint] , identifier[Timedelta] ),
identifier[endpoint] keyword[is] keyword[None] ]) | def _is_valid_endpoint(endpoint):
"""helper for interval_range to check if start/end are valid types"""
return any([is_number(endpoint), isinstance(endpoint, Timestamp), isinstance(endpoint, Timedelta), endpoint is None]) |
def initiate_tasks(self):
""" Loads all tasks using `TaskLoader` from respective configuration option """
self.tasks_classes = TaskLoader().load_tasks(
paths=self.configuration[Configuration.ALGORITHM][Configuration.TASKS][Configuration.PATHS]) | def function[initiate_tasks, parameter[self]]:
constant[ Loads all tasks using `TaskLoader` from respective configuration option ]
name[self].tasks_classes assign[=] call[call[name[TaskLoader], parameter[]].load_tasks, parameter[]] | keyword[def] identifier[initiate_tasks] ( identifier[self] ):
literal[string]
identifier[self] . identifier[tasks_classes] = identifier[TaskLoader] (). identifier[load_tasks] (
identifier[paths] = identifier[self] . identifier[configuration] [ identifier[Configuration] . identifier[ALGORITHM] ][ identifier[Configuration] . identifier[TASKS] ][ identifier[Configuration] . identifier[PATHS] ]) | def initiate_tasks(self):
""" Loads all tasks using `TaskLoader` from respective configuration option """
self.tasks_classes = TaskLoader().load_tasks(paths=self.configuration[Configuration.ALGORITHM][Configuration.TASKS][Configuration.PATHS]) |
def duration(self):
"""Duration of this series in seconds
:type: `~astropy.units.Quantity` scalar
"""
return units.Quantity(self.span[1] - self.span[0], self.xunit,
dtype=float) | def function[duration, parameter[self]]:
constant[Duration of this series in seconds
:type: `~astropy.units.Quantity` scalar
]
return[call[name[units].Quantity, parameter[binary_operation[call[name[self].span][constant[1]] - call[name[self].span][constant[0]]], name[self].xunit]]] | keyword[def] identifier[duration] ( identifier[self] ):
literal[string]
keyword[return] identifier[units] . identifier[Quantity] ( identifier[self] . identifier[span] [ literal[int] ]- identifier[self] . identifier[span] [ literal[int] ], identifier[self] . identifier[xunit] ,
identifier[dtype] = identifier[float] ) | def duration(self):
"""Duration of this series in seconds
:type: `~astropy.units.Quantity` scalar
"""
return units.Quantity(self.span[1] - self.span[0], self.xunit, dtype=float) |
def order_results_by(*fields):
"""A decorator that applies an ordering to the QuerySet returned by a
function.
"""
def decorator(f):
@functools.wraps(f)
def wrapper(*args, **kw):
result = f(*args, **kw)
return result.order_by(*fields)
return wrapper
return decorator | def function[order_results_by, parameter[]]:
constant[A decorator that applies an ordering to the QuerySet returned by a
function.
]
def function[decorator, parameter[f]]:
def function[wrapper, parameter[]]:
variable[result] assign[=] call[name[f], parameter[<ast.Starred object at 0x7da1b0e32ad0>]]
return[call[name[result].order_by, parameter[<ast.Starred object at 0x7da1b0e31030>]]]
return[name[wrapper]]
return[name[decorator]] | keyword[def] identifier[order_results_by] (* identifier[fields] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[f] ):
@ identifier[functools] . identifier[wraps] ( identifier[f] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kw] ):
identifier[result] = identifier[f] (* identifier[args] ,** identifier[kw] )
keyword[return] identifier[result] . identifier[order_by] (* identifier[fields] )
keyword[return] identifier[wrapper]
keyword[return] identifier[decorator] | def order_results_by(*fields):
"""A decorator that applies an ordering to the QuerySet returned by a
function.
"""
def decorator(f):
@functools.wraps(f)
def wrapper(*args, **kw):
result = f(*args, **kw)
return result.order_by(*fields)
return wrapper
return decorator |
def process_configs(file_lookup, app_config_format, pipeline_config):
"""Processes the configs from lookup sources.
Args:
file_lookup (FileLookup): Source to look for file/config
app_config_format (str): The format for application config files.
pipeline_config (str): Name/path of the pipeline config
Returns:
dict: Retreived application config
"""
app_configs = collections.defaultdict(dict)
for env in ENVS:
file_json = app_config_format.format(env=env)
try:
env_config = file_lookup.json(filename=file_json)
app_configs[env] = apply_region_configs(env_config)
except FileNotFoundError:
LOG.critical('Application configuration not available for %s.', env)
continue
try:
app_configs['pipeline'] = file_lookup.json(filename=pipeline_config)
except FileNotFoundError:
LOG.warning('Unable to process pipeline.json. Using defaults.')
app_configs['pipeline'] = {'env': ['stage', 'prod']}
LOG.debug('Application configs:\n%s', app_configs)
return app_configs | def function[process_configs, parameter[file_lookup, app_config_format, pipeline_config]]:
constant[Processes the configs from lookup sources.
Args:
file_lookup (FileLookup): Source to look for file/config
app_config_format (str): The format for application config files.
pipeline_config (str): Name/path of the pipeline config
Returns:
dict: Retreived application config
]
variable[app_configs] assign[=] call[name[collections].defaultdict, parameter[name[dict]]]
for taget[name[env]] in starred[name[ENVS]] begin[:]
variable[file_json] assign[=] call[name[app_config_format].format, parameter[]]
<ast.Try object at 0x7da204345510>
<ast.Try object at 0x7da18f58f250>
call[name[LOG].debug, parameter[constant[Application configs:
%s], name[app_configs]]]
return[name[app_configs]] | keyword[def] identifier[process_configs] ( identifier[file_lookup] , identifier[app_config_format] , identifier[pipeline_config] ):
literal[string]
identifier[app_configs] = identifier[collections] . identifier[defaultdict] ( identifier[dict] )
keyword[for] identifier[env] keyword[in] identifier[ENVS] :
identifier[file_json] = identifier[app_config_format] . identifier[format] ( identifier[env] = identifier[env] )
keyword[try] :
identifier[env_config] = identifier[file_lookup] . identifier[json] ( identifier[filename] = identifier[file_json] )
identifier[app_configs] [ identifier[env] ]= identifier[apply_region_configs] ( identifier[env_config] )
keyword[except] identifier[FileNotFoundError] :
identifier[LOG] . identifier[critical] ( literal[string] , identifier[env] )
keyword[continue]
keyword[try] :
identifier[app_configs] [ literal[string] ]= identifier[file_lookup] . identifier[json] ( identifier[filename] = identifier[pipeline_config] )
keyword[except] identifier[FileNotFoundError] :
identifier[LOG] . identifier[warning] ( literal[string] )
identifier[app_configs] [ literal[string] ]={ literal[string] :[ literal[string] , literal[string] ]}
identifier[LOG] . identifier[debug] ( literal[string] , identifier[app_configs] )
keyword[return] identifier[app_configs] | def process_configs(file_lookup, app_config_format, pipeline_config):
"""Processes the configs from lookup sources.
Args:
file_lookup (FileLookup): Source to look for file/config
app_config_format (str): The format for application config files.
pipeline_config (str): Name/path of the pipeline config
Returns:
dict: Retreived application config
"""
app_configs = collections.defaultdict(dict)
for env in ENVS:
file_json = app_config_format.format(env=env)
try:
env_config = file_lookup.json(filename=file_json)
app_configs[env] = apply_region_configs(env_config) # depends on [control=['try'], data=[]]
except FileNotFoundError:
LOG.critical('Application configuration not available for %s.', env)
continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['env']]
try:
app_configs['pipeline'] = file_lookup.json(filename=pipeline_config) # depends on [control=['try'], data=[]]
except FileNotFoundError:
LOG.warning('Unable to process pipeline.json. Using defaults.')
app_configs['pipeline'] = {'env': ['stage', 'prod']} # depends on [control=['except'], data=[]]
LOG.debug('Application configs:\n%s', app_configs)
return app_configs |
def bin_hash160Bytes(bts):
"""
Get a hash of the provided message using the ripemd160 algorithm.
Args:
bts (str): message to hash.
Returns:
bytes: hash.
"""
intermed = hashlib.sha256(bts).digest()
return hashlib.new('ripemd160', intermed).digest() | def function[bin_hash160Bytes, parameter[bts]]:
constant[
Get a hash of the provided message using the ripemd160 algorithm.
Args:
bts (str): message to hash.
Returns:
bytes: hash.
]
variable[intermed] assign[=] call[call[name[hashlib].sha256, parameter[name[bts]]].digest, parameter[]]
return[call[call[name[hashlib].new, parameter[constant[ripemd160], name[intermed]]].digest, parameter[]]] | keyword[def] identifier[bin_hash160Bytes] ( identifier[bts] ):
literal[string]
identifier[intermed] = identifier[hashlib] . identifier[sha256] ( identifier[bts] ). identifier[digest] ()
keyword[return] identifier[hashlib] . identifier[new] ( literal[string] , identifier[intermed] ). identifier[digest] () | def bin_hash160Bytes(bts):
"""
Get a hash of the provided message using the ripemd160 algorithm.
Args:
bts (str): message to hash.
Returns:
bytes: hash.
"""
intermed = hashlib.sha256(bts).digest()
return hashlib.new('ripemd160', intermed).digest() |
def index(self, solr, collection, threads=1, send_method='stream_file', **kwargs):
'''
Will index the queue into a specified solr instance and collection. Specify multiple threads to make this faster, however keep in mind that if you specify multiple threads the items may not be in order.
Example::
solr = SolrClient('http://localhost:8983/solr/')
for doc in self.docs:
index.add(doc, finalize=True)
index.index(solr,'SolrClient_unittest')
:param object solr: SolrClient object.
:param string collection: The name of the collection to index document into.
:param int threads: Number of simultaneous threads to spin up for indexing.
:param string send_method: SolrClient method to execute for indexing. Default is stream_file
'''
try:
method = getattr(solr, send_method)
except AttributeError:
raise AttributeError("Couldn't find the send_method. Specify either stream_file or local_index")
self.logger.info("Indexing {} into {} using {}".format(self._queue_name,
collection,
send_method))
if threads > 1:
if hasattr(collection, '__call__'):
self.logger.debug("Overwriting send_method to index_json")
method = getattr(solr, 'index_json')
method = partial(self._wrap_dynamic, method, collection)
else:
method = partial(self._wrap, method, collection)
with ThreadPool(threads) as p:
p.map(method, self.get_todo_items())
else:
for todo_file in self.get_todo_items():
try:
result = method(collection, todo_file)
if result:
self.complete(todo_file)
except SolrError:
self.logger.error("Error Indexing Item: {}".format(todo_file))
self._unlock()
raise | def function[index, parameter[self, solr, collection, threads, send_method]]:
constant[
Will index the queue into a specified solr instance and collection. Specify multiple threads to make this faster, however keep in mind that if you specify multiple threads the items may not be in order.
Example::
solr = SolrClient('http://localhost:8983/solr/')
for doc in self.docs:
index.add(doc, finalize=True)
index.index(solr,'SolrClient_unittest')
:param object solr: SolrClient object.
:param string collection: The name of the collection to index document into.
:param int threads: Number of simultaneous threads to spin up for indexing.
:param string send_method: SolrClient method to execute for indexing. Default is stream_file
]
<ast.Try object at 0x7da20c7cb520>
call[name[self].logger.info, parameter[call[constant[Indexing {} into {} using {}].format, parameter[name[self]._queue_name, name[collection], name[send_method]]]]]
if compare[name[threads] greater[>] constant[1]] begin[:]
if call[name[hasattr], parameter[name[collection], constant[__call__]]] begin[:]
call[name[self].logger.debug, parameter[constant[Overwriting send_method to index_json]]]
variable[method] assign[=] call[name[getattr], parameter[name[solr], constant[index_json]]]
variable[method] assign[=] call[name[partial], parameter[name[self]._wrap_dynamic, name[method], name[collection]]]
with call[name[ThreadPool], parameter[name[threads]]] begin[:]
call[name[p].map, parameter[name[method], call[name[self].get_todo_items, parameter[]]]] | keyword[def] identifier[index] ( identifier[self] , identifier[solr] , identifier[collection] , identifier[threads] = literal[int] , identifier[send_method] = literal[string] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[method] = identifier[getattr] ( identifier[solr] , identifier[send_method] )
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[AttributeError] ( literal[string] )
identifier[self] . identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[self] . identifier[_queue_name] ,
identifier[collection] ,
identifier[send_method] ))
keyword[if] identifier[threads] > literal[int] :
keyword[if] identifier[hasattr] ( identifier[collection] , literal[string] ):
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
identifier[method] = identifier[getattr] ( identifier[solr] , literal[string] )
identifier[method] = identifier[partial] ( identifier[self] . identifier[_wrap_dynamic] , identifier[method] , identifier[collection] )
keyword[else] :
identifier[method] = identifier[partial] ( identifier[self] . identifier[_wrap] , identifier[method] , identifier[collection] )
keyword[with] identifier[ThreadPool] ( identifier[threads] ) keyword[as] identifier[p] :
identifier[p] . identifier[map] ( identifier[method] , identifier[self] . identifier[get_todo_items] ())
keyword[else] :
keyword[for] identifier[todo_file] keyword[in] identifier[self] . identifier[get_todo_items] ():
keyword[try] :
identifier[result] = identifier[method] ( identifier[collection] , identifier[todo_file] )
keyword[if] identifier[result] :
identifier[self] . identifier[complete] ( identifier[todo_file] )
keyword[except] identifier[SolrError] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[todo_file] ))
identifier[self] . identifier[_unlock] ()
keyword[raise] | def index(self, solr, collection, threads=1, send_method='stream_file', **kwargs):
"""
Will index the queue into a specified solr instance and collection. Specify multiple threads to make this faster, however keep in mind that if you specify multiple threads the items may not be in order.
Example::
solr = SolrClient('http://localhost:8983/solr/')
for doc in self.docs:
index.add(doc, finalize=True)
index.index(solr,'SolrClient_unittest')
:param object solr: SolrClient object.
:param string collection: The name of the collection to index document into.
:param int threads: Number of simultaneous threads to spin up for indexing.
:param string send_method: SolrClient method to execute for indexing. Default is stream_file
"""
try:
method = getattr(solr, send_method) # depends on [control=['try'], data=[]]
except AttributeError:
raise AttributeError("Couldn't find the send_method. Specify either stream_file or local_index") # depends on [control=['except'], data=[]]
self.logger.info('Indexing {} into {} using {}'.format(self._queue_name, collection, send_method))
if threads > 1:
if hasattr(collection, '__call__'):
self.logger.debug('Overwriting send_method to index_json')
method = getattr(solr, 'index_json')
method = partial(self._wrap_dynamic, method, collection) # depends on [control=['if'], data=[]]
else:
method = partial(self._wrap, method, collection)
with ThreadPool(threads) as p:
p.map(method, self.get_todo_items()) # depends on [control=['with'], data=['p']] # depends on [control=['if'], data=['threads']]
else:
for todo_file in self.get_todo_items():
try:
result = method(collection, todo_file)
if result:
self.complete(todo_file) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except SolrError:
self.logger.error('Error Indexing Item: {}'.format(todo_file))
self._unlock()
raise # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['todo_file']] |
def validate_capacity(self, servers):
"""Validates if a deploy can be fulfilled.
"""
try:
return self.manager.validate_capacity(servers)
except packet.baseapi.Error as msg:
raise PacketManagerException(msg) | def function[validate_capacity, parameter[self, servers]]:
constant[Validates if a deploy can be fulfilled.
]
<ast.Try object at 0x7da1b13abf10> | keyword[def] identifier[validate_capacity] ( identifier[self] , identifier[servers] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[manager] . identifier[validate_capacity] ( identifier[servers] )
keyword[except] identifier[packet] . identifier[baseapi] . identifier[Error] keyword[as] identifier[msg] :
keyword[raise] identifier[PacketManagerException] ( identifier[msg] ) | def validate_capacity(self, servers):
"""Validates if a deploy can be fulfilled.
"""
try:
return self.manager.validate_capacity(servers) # depends on [control=['try'], data=[]]
except packet.baseapi.Error as msg:
raise PacketManagerException(msg) # depends on [control=['except'], data=['msg']] |
def append_child(self, name):
"""Append a child element with the specified name."""
return XMLElement(lib.lsl_append_child(self.e, str.encode(name))) | def function[append_child, parameter[self, name]]:
constant[Append a child element with the specified name.]
return[call[name[XMLElement], parameter[call[name[lib].lsl_append_child, parameter[name[self].e, call[name[str].encode, parameter[name[name]]]]]]]] | keyword[def] identifier[append_child] ( identifier[self] , identifier[name] ):
literal[string]
keyword[return] identifier[XMLElement] ( identifier[lib] . identifier[lsl_append_child] ( identifier[self] . identifier[e] , identifier[str] . identifier[encode] ( identifier[name] ))) | def append_child(self, name):
"""Append a child element with the specified name."""
return XMLElement(lib.lsl_append_child(self.e, str.encode(name))) |
def to_simple_model(self, instance, **options): # noqa
""" Convert model to simple python structure.
"""
options = self.init_options(**options)
fields, include, exclude, related = options['fields'], options['include'], options['exclude'], options['related'] # noqa
result = dict(
model=smart_unicode(instance._meta),
pk=smart_unicode(
instance._get_pk_val(), strings_only=True),
fields=dict(),
)
default_fields = set([field.name for field in instance._meta.fields if field.serialize])
serialized_fields = fields or (default_fields | include) - exclude
for fname in serialized_fields:
# Respect `to_simple__<fname>`
to_simple = getattr(
self.scheme, 'to_simple__{0}'.format(fname), None)
if to_simple:
result['fields'][fname] = to_simple(instance, serializer=self)
continue
related_options = related.get(fname, dict())
if related_options:
related_options = self.init_options(**related_options)
if fname in default_fields and not related_options:
field = instance._meta.get_field(fname)
value = field.value_from_object(instance)
else:
value = getattr(instance, fname, None)
if isinstance(value, Manager):
value = value.all()
result['fields'][fname] = self.to_simple(
value, **related_options)
if self.format != 'django':
fields = result['fields']
fields['id'] = result['pk']
result = fields
return result | def function[to_simple_model, parameter[self, instance]]:
constant[ Convert model to simple python structure.
]
variable[options] assign[=] call[name[self].init_options, parameter[]]
<ast.Tuple object at 0x7da18eb54880> assign[=] tuple[[<ast.Subscript object at 0x7da20e955ab0>, <ast.Subscript object at 0x7da20e957340>, <ast.Subscript object at 0x7da20e956e30>, <ast.Subscript object at 0x7da20e956da0>]]
variable[result] assign[=] call[name[dict], parameter[]]
variable[default_fields] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da20e9548b0>]]
variable[serialized_fields] assign[=] <ast.BoolOp object at 0x7da20e954f10>
for taget[name[fname]] in starred[name[serialized_fields]] begin[:]
variable[to_simple] assign[=] call[name[getattr], parameter[name[self].scheme, call[constant[to_simple__{0}].format, parameter[name[fname]]], constant[None]]]
if name[to_simple] begin[:]
call[call[name[result]][constant[fields]]][name[fname]] assign[=] call[name[to_simple], parameter[name[instance]]]
continue
variable[related_options] assign[=] call[name[related].get, parameter[name[fname], call[name[dict], parameter[]]]]
if name[related_options] begin[:]
variable[related_options] assign[=] call[name[self].init_options, parameter[]]
if <ast.BoolOp object at 0x7da207f99b10> begin[:]
variable[field] assign[=] call[name[instance]._meta.get_field, parameter[name[fname]]]
variable[value] assign[=] call[name[field].value_from_object, parameter[name[instance]]]
call[call[name[result]][constant[fields]]][name[fname]] assign[=] call[name[self].to_simple, parameter[name[value]]]
if compare[name[self].format not_equal[!=] constant[django]] begin[:]
variable[fields] assign[=] call[name[result]][constant[fields]]
call[name[fields]][constant[id]] assign[=] call[name[result]][constant[pk]]
variable[result] assign[=] name[fields]
return[name[result]] | keyword[def] identifier[to_simple_model] ( identifier[self] , identifier[instance] ,** identifier[options] ):
literal[string]
identifier[options] = identifier[self] . identifier[init_options] (** identifier[options] )
identifier[fields] , identifier[include] , identifier[exclude] , identifier[related] = identifier[options] [ literal[string] ], identifier[options] [ literal[string] ], identifier[options] [ literal[string] ], identifier[options] [ literal[string] ]
identifier[result] = identifier[dict] (
identifier[model] = identifier[smart_unicode] ( identifier[instance] . identifier[_meta] ),
identifier[pk] = identifier[smart_unicode] (
identifier[instance] . identifier[_get_pk_val] (), identifier[strings_only] = keyword[True] ),
identifier[fields] = identifier[dict] (),
)
identifier[default_fields] = identifier[set] ([ identifier[field] . identifier[name] keyword[for] identifier[field] keyword[in] identifier[instance] . identifier[_meta] . identifier[fields] keyword[if] identifier[field] . identifier[serialize] ])
identifier[serialized_fields] = identifier[fields] keyword[or] ( identifier[default_fields] | identifier[include] )- identifier[exclude]
keyword[for] identifier[fname] keyword[in] identifier[serialized_fields] :
identifier[to_simple] = identifier[getattr] (
identifier[self] . identifier[scheme] , literal[string] . identifier[format] ( identifier[fname] ), keyword[None] )
keyword[if] identifier[to_simple] :
identifier[result] [ literal[string] ][ identifier[fname] ]= identifier[to_simple] ( identifier[instance] , identifier[serializer] = identifier[self] )
keyword[continue]
identifier[related_options] = identifier[related] . identifier[get] ( identifier[fname] , identifier[dict] ())
keyword[if] identifier[related_options] :
identifier[related_options] = identifier[self] . identifier[init_options] (** identifier[related_options] )
keyword[if] identifier[fname] keyword[in] identifier[default_fields] keyword[and] keyword[not] identifier[related_options] :
identifier[field] = identifier[instance] . identifier[_meta] . identifier[get_field] ( identifier[fname] )
identifier[value] = identifier[field] . identifier[value_from_object] ( identifier[instance] )
keyword[else] :
identifier[value] = identifier[getattr] ( identifier[instance] , identifier[fname] , keyword[None] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[Manager] ):
identifier[value] = identifier[value] . identifier[all] ()
identifier[result] [ literal[string] ][ identifier[fname] ]= identifier[self] . identifier[to_simple] (
identifier[value] ,** identifier[related_options] )
keyword[if] identifier[self] . identifier[format] != literal[string] :
identifier[fields] = identifier[result] [ literal[string] ]
identifier[fields] [ literal[string] ]= identifier[result] [ literal[string] ]
identifier[result] = identifier[fields]
keyword[return] identifier[result] | def to_simple_model(self, instance, **options): # noqa
' Convert model to simple python structure.\n '
options = self.init_options(**options)
(fields, include, exclude, related) = (options['fields'], options['include'], options['exclude'], options['related']) # noqa
result = dict(model=smart_unicode(instance._meta), pk=smart_unicode(instance._get_pk_val(), strings_only=True), fields=dict())
default_fields = set([field.name for field in instance._meta.fields if field.serialize])
serialized_fields = fields or (default_fields | include) - exclude
for fname in serialized_fields:
# Respect `to_simple__<fname>`
to_simple = getattr(self.scheme, 'to_simple__{0}'.format(fname), None)
if to_simple:
result['fields'][fname] = to_simple(instance, serializer=self)
continue # depends on [control=['if'], data=[]]
related_options = related.get(fname, dict())
if related_options:
related_options = self.init_options(**related_options) # depends on [control=['if'], data=[]]
if fname in default_fields and (not related_options):
field = instance._meta.get_field(fname)
value = field.value_from_object(instance) # depends on [control=['if'], data=[]]
else:
value = getattr(instance, fname, None)
if isinstance(value, Manager):
value = value.all() # depends on [control=['if'], data=[]]
result['fields'][fname] = self.to_simple(value, **related_options) # depends on [control=['for'], data=['fname']]
if self.format != 'django':
fields = result['fields']
fields['id'] = result['pk']
result = fields # depends on [control=['if'], data=[]]
return result |
def from_error(exc_info, json_encoder, debug_url=None):
"""Wraps another Exception in an InternalError.
:param exc_info: The exception info for the wrapped exception
:type exc_info: (type, object, traceback)
:type json_encoder: json.JSONEncoder
:type debug_url: str | None
:rtype: InternalError
.. versionadded:: 0.1.0
.. versionchanged:: 0.2.0
Stringifies non-JSON-serializable objects
"""
exc = exc_info[1]
data = exc.__dict__.copy()
for key, value in data.items():
try:
json_encoder.encode(value)
except TypeError:
data[key] = repr(value)
data["traceback"] = "".join(traceback.format_exception(*exc_info))
if debug_url is not None:
data["debug_url"] = debug_url
return InternalError(data) | def function[from_error, parameter[exc_info, json_encoder, debug_url]]:
constant[Wraps another Exception in an InternalError.
:param exc_info: The exception info for the wrapped exception
:type exc_info: (type, object, traceback)
:type json_encoder: json.JSONEncoder
:type debug_url: str | None
:rtype: InternalError
.. versionadded:: 0.1.0
.. versionchanged:: 0.2.0
Stringifies non-JSON-serializable objects
]
variable[exc] assign[=] call[name[exc_info]][constant[1]]
variable[data] assign[=] call[name[exc].__dict__.copy, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20c990c40>, <ast.Name object at 0x7da20c991690>]]] in starred[call[name[data].items, parameter[]]] begin[:]
<ast.Try object at 0x7da20c990190>
call[name[data]][constant[traceback]] assign[=] call[constant[].join, parameter[call[name[traceback].format_exception, parameter[<ast.Starred object at 0x7da20c991c90>]]]]
if compare[name[debug_url] is_not constant[None]] begin[:]
call[name[data]][constant[debug_url]] assign[=] name[debug_url]
return[call[name[InternalError], parameter[name[data]]]] | keyword[def] identifier[from_error] ( identifier[exc_info] , identifier[json_encoder] , identifier[debug_url] = keyword[None] ):
literal[string]
identifier[exc] = identifier[exc_info] [ literal[int] ]
identifier[data] = identifier[exc] . identifier[__dict__] . identifier[copy] ()
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[data] . identifier[items] ():
keyword[try] :
identifier[json_encoder] . identifier[encode] ( identifier[value] )
keyword[except] identifier[TypeError] :
identifier[data] [ identifier[key] ]= identifier[repr] ( identifier[value] )
identifier[data] [ literal[string] ]= literal[string] . identifier[join] ( identifier[traceback] . identifier[format_exception] (* identifier[exc_info] ))
keyword[if] identifier[debug_url] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[debug_url]
keyword[return] identifier[InternalError] ( identifier[data] ) | def from_error(exc_info, json_encoder, debug_url=None):
"""Wraps another Exception in an InternalError.
:param exc_info: The exception info for the wrapped exception
:type exc_info: (type, object, traceback)
:type json_encoder: json.JSONEncoder
:type debug_url: str | None
:rtype: InternalError
.. versionadded:: 0.1.0
.. versionchanged:: 0.2.0
Stringifies non-JSON-serializable objects
"""
exc = exc_info[1]
data = exc.__dict__.copy()
for (key, value) in data.items():
try:
json_encoder.encode(value) # depends on [control=['try'], data=[]]
except TypeError:
data[key] = repr(value) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
data['traceback'] = ''.join(traceback.format_exception(*exc_info))
if debug_url is not None:
data['debug_url'] = debug_url # depends on [control=['if'], data=['debug_url']]
return InternalError(data) |
def clear(self):
"""
removes all items from this manifest, and clears and removes all
sub-sections
"""
for sub in self.sub_sections.values():
sub.clear()
self.sub_sections.clear()
ManifestSection.clear(self) | def function[clear, parameter[self]]:
constant[
removes all items from this manifest, and clears and removes all
sub-sections
]
for taget[name[sub]] in starred[call[name[self].sub_sections.values, parameter[]]] begin[:]
call[name[sub].clear, parameter[]]
call[name[self].sub_sections.clear, parameter[]]
call[name[ManifestSection].clear, parameter[name[self]]] | keyword[def] identifier[clear] ( identifier[self] ):
literal[string]
keyword[for] identifier[sub] keyword[in] identifier[self] . identifier[sub_sections] . identifier[values] ():
identifier[sub] . identifier[clear] ()
identifier[self] . identifier[sub_sections] . identifier[clear] ()
identifier[ManifestSection] . identifier[clear] ( identifier[self] ) | def clear(self):
"""
removes all items from this manifest, and clears and removes all
sub-sections
"""
for sub in self.sub_sections.values():
sub.clear() # depends on [control=['for'], data=['sub']]
self.sub_sections.clear()
ManifestSection.clear(self) |
def from_composition_and_entries(comp, entries_in_chemsys,
working_ion_symbol="Li"):
"""
Convenience constructor to make a ConversionElectrode from a
composition and all entries in a chemical system.
Args:
comp: Starting composition for ConversionElectrode, e.g.,
Composition("FeF3")
entries_in_chemsys: Sequence containing all entries in a
chemical system. E.g., all Li-Fe-F containing entries.
working_ion_symbol: Element symbol of working ion. Defaults to Li.
"""
pd = PhaseDiagram(entries_in_chemsys)
return ConversionElectrode.from_composition_and_pd(comp, pd,
working_ion_symbol) | def function[from_composition_and_entries, parameter[comp, entries_in_chemsys, working_ion_symbol]]:
constant[
Convenience constructor to make a ConversionElectrode from a
composition and all entries in a chemical system.
Args:
comp: Starting composition for ConversionElectrode, e.g.,
Composition("FeF3")
entries_in_chemsys: Sequence containing all entries in a
chemical system. E.g., all Li-Fe-F containing entries.
working_ion_symbol: Element symbol of working ion. Defaults to Li.
]
variable[pd] assign[=] call[name[PhaseDiagram], parameter[name[entries_in_chemsys]]]
return[call[name[ConversionElectrode].from_composition_and_pd, parameter[name[comp], name[pd], name[working_ion_symbol]]]] | keyword[def] identifier[from_composition_and_entries] ( identifier[comp] , identifier[entries_in_chemsys] ,
identifier[working_ion_symbol] = literal[string] ):
literal[string]
identifier[pd] = identifier[PhaseDiagram] ( identifier[entries_in_chemsys] )
keyword[return] identifier[ConversionElectrode] . identifier[from_composition_and_pd] ( identifier[comp] , identifier[pd] ,
identifier[working_ion_symbol] ) | def from_composition_and_entries(comp, entries_in_chemsys, working_ion_symbol='Li'):
"""
Convenience constructor to make a ConversionElectrode from a
composition and all entries in a chemical system.
Args:
comp: Starting composition for ConversionElectrode, e.g.,
Composition("FeF3")
entries_in_chemsys: Sequence containing all entries in a
chemical system. E.g., all Li-Fe-F containing entries.
working_ion_symbol: Element symbol of working ion. Defaults to Li.
"""
pd = PhaseDiagram(entries_in_chemsys)
return ConversionElectrode.from_composition_and_pd(comp, pd, working_ion_symbol) |
def authorizer(self, schemes, resource, action, request_args):
"""Construct the Authorization header for a request.
Args:
schemes (list of str): Authentication schemes supported for the
requested action.
resource (str): Object upon which an action is being performed.
action (str): Action being performed.
request_args (list of str): Arguments passed to the action call.
Returns:
(str, str) A tuple of the auth scheme satisfied, and the credential
for the Authorization header or empty strings if none could be
satisfied.
"""
if not schemes:
return u'', u''
for scheme in schemes:
if scheme in self.schemes and self.has_auth_params(scheme):
cred = Context.format_auth_params(self.schemes[scheme][u'params'])
if hasattr(self, 'mfa_token'):
cred = '{}, mfa_token="{}"'.format(cred, self.mfa_token)
return scheme, cred
raise AuthenticationError(self, schemes) | def function[authorizer, parameter[self, schemes, resource, action, request_args]]:
constant[Construct the Authorization header for a request.
Args:
schemes (list of str): Authentication schemes supported for the
requested action.
resource (str): Object upon which an action is being performed.
action (str): Action being performed.
request_args (list of str): Arguments passed to the action call.
Returns:
(str, str) A tuple of the auth scheme satisfied, and the credential
for the Authorization header or empty strings if none could be
satisfied.
]
if <ast.UnaryOp object at 0x7da2054a60b0> begin[:]
return[tuple[[<ast.Constant object at 0x7da2054a5300>, <ast.Constant object at 0x7da2054a4e80>]]]
for taget[name[scheme]] in starred[name[schemes]] begin[:]
if <ast.BoolOp object at 0x7da2054a4520> begin[:]
variable[cred] assign[=] call[name[Context].format_auth_params, parameter[call[call[name[self].schemes][name[scheme]]][constant[params]]]]
if call[name[hasattr], parameter[name[self], constant[mfa_token]]] begin[:]
variable[cred] assign[=] call[constant[{}, mfa_token="{}"].format, parameter[name[cred], name[self].mfa_token]]
return[tuple[[<ast.Name object at 0x7da18fe93850>, <ast.Name object at 0x7da18fe937c0>]]]
<ast.Raise object at 0x7da18fe93af0> | keyword[def] identifier[authorizer] ( identifier[self] , identifier[schemes] , identifier[resource] , identifier[action] , identifier[request_args] ):
literal[string]
keyword[if] keyword[not] identifier[schemes] :
keyword[return] literal[string] , literal[string]
keyword[for] identifier[scheme] keyword[in] identifier[schemes] :
keyword[if] identifier[scheme] keyword[in] identifier[self] . identifier[schemes] keyword[and] identifier[self] . identifier[has_auth_params] ( identifier[scheme] ):
identifier[cred] = identifier[Context] . identifier[format_auth_params] ( identifier[self] . identifier[schemes] [ identifier[scheme] ][ literal[string] ])
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[cred] = literal[string] . identifier[format] ( identifier[cred] , identifier[self] . identifier[mfa_token] )
keyword[return] identifier[scheme] , identifier[cred]
keyword[raise] identifier[AuthenticationError] ( identifier[self] , identifier[schemes] ) | def authorizer(self, schemes, resource, action, request_args):
"""Construct the Authorization header for a request.
Args:
schemes (list of str): Authentication schemes supported for the
requested action.
resource (str): Object upon which an action is being performed.
action (str): Action being performed.
request_args (list of str): Arguments passed to the action call.
Returns:
(str, str) A tuple of the auth scheme satisfied, and the credential
for the Authorization header or empty strings if none could be
satisfied.
"""
if not schemes:
return (u'', u'') # depends on [control=['if'], data=[]]
for scheme in schemes:
if scheme in self.schemes and self.has_auth_params(scheme):
cred = Context.format_auth_params(self.schemes[scheme][u'params'])
if hasattr(self, 'mfa_token'):
cred = '{}, mfa_token="{}"'.format(cred, self.mfa_token) # depends on [control=['if'], data=[]]
return (scheme, cred) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['scheme']]
raise AuthenticationError(self, schemes) |
def get_item_name(self, item, parent):
""" Returns the value of the first name element found inside of element """
names = self.get_name_elements(item)
if not names:
raise MissingNameElementError
name = names[0].text
prefix = self.item_name_prefix(parent)
if prefix:
name = prefix + name
return name | def function[get_item_name, parameter[self, item, parent]]:
constant[ Returns the value of the first name element found inside of element ]
variable[names] assign[=] call[name[self].get_name_elements, parameter[name[item]]]
if <ast.UnaryOp object at 0x7da1b1507280> begin[:]
<ast.Raise object at 0x7da1b1507eb0>
variable[name] assign[=] call[name[names]][constant[0]].text
variable[prefix] assign[=] call[name[self].item_name_prefix, parameter[name[parent]]]
if name[prefix] begin[:]
variable[name] assign[=] binary_operation[name[prefix] + name[name]]
return[name[name]] | keyword[def] identifier[get_item_name] ( identifier[self] , identifier[item] , identifier[parent] ):
literal[string]
identifier[names] = identifier[self] . identifier[get_name_elements] ( identifier[item] )
keyword[if] keyword[not] identifier[names] :
keyword[raise] identifier[MissingNameElementError]
identifier[name] = identifier[names] [ literal[int] ]. identifier[text]
identifier[prefix] = identifier[self] . identifier[item_name_prefix] ( identifier[parent] )
keyword[if] identifier[prefix] :
identifier[name] = identifier[prefix] + identifier[name]
keyword[return] identifier[name] | def get_item_name(self, item, parent):
""" Returns the value of the first name element found inside of element """
names = self.get_name_elements(item)
if not names:
raise MissingNameElementError # depends on [control=['if'], data=[]]
name = names[0].text
prefix = self.item_name_prefix(parent)
if prefix:
name = prefix + name # depends on [control=['if'], data=[]]
return name |
def install_table(self, connection, table, logger = None):
""" Installs all partitons of the table and create view with union of all partitons.
Args:
connection: connection to database who stores mpr data.
table (orm.Table):
"""
# first install all partitions of the table
queries = []
query_tmpl = 'SELECT * FROM {}'
for partition in table.partitions:
partition.localize()
installed_name = self.install(connection, partition)
queries.append(query_tmpl.format(installed_name))
# now create view with union of all partitions.
query = 'CREATE VIEW {} AS {} '.format( table.vid, '\nUNION ALL\n'.join(queries))
logger.debug('Creating view for table.\n table: {}\n query: {}'.format(table.vid, query))
self._execute(connection, query, fetch=False) | def function[install_table, parameter[self, connection, table, logger]]:
constant[ Installs all partitons of the table and create view with union of all partitons.
Args:
connection: connection to database who stores mpr data.
table (orm.Table):
]
variable[queries] assign[=] list[[]]
variable[query_tmpl] assign[=] constant[SELECT * FROM {}]
for taget[name[partition]] in starred[name[table].partitions] begin[:]
call[name[partition].localize, parameter[]]
variable[installed_name] assign[=] call[name[self].install, parameter[name[connection], name[partition]]]
call[name[queries].append, parameter[call[name[query_tmpl].format, parameter[name[installed_name]]]]]
variable[query] assign[=] call[constant[CREATE VIEW {} AS {} ].format, parameter[name[table].vid, call[constant[
UNION ALL
].join, parameter[name[queries]]]]]
call[name[logger].debug, parameter[call[constant[Creating view for table.
table: {}
query: {}].format, parameter[name[table].vid, name[query]]]]]
call[name[self]._execute, parameter[name[connection], name[query]]] | keyword[def] identifier[install_table] ( identifier[self] , identifier[connection] , identifier[table] , identifier[logger] = keyword[None] ):
literal[string]
identifier[queries] =[]
identifier[query_tmpl] = literal[string]
keyword[for] identifier[partition] keyword[in] identifier[table] . identifier[partitions] :
identifier[partition] . identifier[localize] ()
identifier[installed_name] = identifier[self] . identifier[install] ( identifier[connection] , identifier[partition] )
identifier[queries] . identifier[append] ( identifier[query_tmpl] . identifier[format] ( identifier[installed_name] ))
identifier[query] = literal[string] . identifier[format] ( identifier[table] . identifier[vid] , literal[string] . identifier[join] ( identifier[queries] ))
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[table] . identifier[vid] , identifier[query] ))
identifier[self] . identifier[_execute] ( identifier[connection] , identifier[query] , identifier[fetch] = keyword[False] ) | def install_table(self, connection, table, logger=None):
""" Installs all partitons of the table and create view with union of all partitons.
Args:
connection: connection to database who stores mpr data.
table (orm.Table):
"""
# first install all partitions of the table
queries = []
query_tmpl = 'SELECT * FROM {}'
for partition in table.partitions:
partition.localize()
installed_name = self.install(connection, partition)
queries.append(query_tmpl.format(installed_name)) # depends on [control=['for'], data=['partition']]
# now create view with union of all partitions.
query = 'CREATE VIEW {} AS {} '.format(table.vid, '\nUNION ALL\n'.join(queries))
logger.debug('Creating view for table.\n table: {}\n query: {}'.format(table.vid, query))
self._execute(connection, query, fetch=False) |
def salsa20_8(B, x, src, s_start, dest, d_start):
"""Salsa20/8 http://en.wikipedia.org/wiki/Salsa20"""
# Merged blockxor for speed
for i in xrange(16):
x[i] = B[i] = B[i] ^ src[s_start + i]
# This is the actual Salsa 20/8: four identical double rounds
for i in xrange(4):
R(x, 4, 0,12, 7);R(x, 8, 4, 0, 9);R(x,12, 8, 4,13);R(x, 0,12, 8,18)
R(x, 9, 5, 1, 7);R(x,13, 9, 5, 9);R(x, 1,13, 9,13);R(x, 5, 1,13,18)
R(x,14,10, 6, 7);R(x, 2,14,10, 9);R(x, 6, 2,14,13);R(x,10, 6, 2,18)
R(x, 3,15,11, 7);R(x, 7, 3,15, 9);R(x,11, 7, 3,13);R(x,15,11, 7,18)
R(x, 1, 0, 3, 7);R(x, 2, 1, 0, 9);R(x, 3, 2, 1,13);R(x, 0, 3, 2,18)
R(x, 6, 5, 4, 7);R(x, 7, 6, 5, 9);R(x, 4, 7, 6,13);R(x, 5, 4, 7,18)
R(x,11,10, 9, 7);R(x, 8,11,10, 9);R(x, 9, 8,11,13);R(x,10, 9, 8,18)
R(x,12,15,14, 7);R(x,13,12,15, 9);R(x,14,13,12,13);R(x,15,14,13,18)
# While we are handling the data, write it to the correct dest.
# The latter half is still part of salsa20
for i in xrange(16):
dest[d_start + i] = B[i] = (x[i] + B[i]) & 0xffffffff | def function[salsa20_8, parameter[B, x, src, s_start, dest, d_start]]:
constant[Salsa20/8 http://en.wikipedia.org/wiki/Salsa20]
for taget[name[i]] in starred[call[name[xrange], parameter[constant[16]]]] begin[:]
call[name[x]][name[i]] assign[=] binary_operation[call[name[B]][name[i]] <ast.BitXor object at 0x7da2590d6b00> call[name[src]][binary_operation[name[s_start] + name[i]]]]
for taget[name[i]] in starred[call[name[xrange], parameter[constant[4]]]] begin[:]
call[name[R], parameter[name[x], constant[4], constant[0], constant[12], constant[7]]]
call[name[R], parameter[name[x], constant[8], constant[4], constant[0], constant[9]]]
call[name[R], parameter[name[x], constant[12], constant[8], constant[4], constant[13]]]
call[name[R], parameter[name[x], constant[0], constant[12], constant[8], constant[18]]]
call[name[R], parameter[name[x], constant[9], constant[5], constant[1], constant[7]]]
call[name[R], parameter[name[x], constant[13], constant[9], constant[5], constant[9]]]
call[name[R], parameter[name[x], constant[1], constant[13], constant[9], constant[13]]]
call[name[R], parameter[name[x], constant[5], constant[1], constant[13], constant[18]]]
call[name[R], parameter[name[x], constant[14], constant[10], constant[6], constant[7]]]
call[name[R], parameter[name[x], constant[2], constant[14], constant[10], constant[9]]]
call[name[R], parameter[name[x], constant[6], constant[2], constant[14], constant[13]]]
call[name[R], parameter[name[x], constant[10], constant[6], constant[2], constant[18]]]
call[name[R], parameter[name[x], constant[3], constant[15], constant[11], constant[7]]]
call[name[R], parameter[name[x], constant[7], constant[3], constant[15], constant[9]]]
call[name[R], parameter[name[x], constant[11], constant[7], constant[3], constant[13]]]
call[name[R], parameter[name[x], constant[15], constant[11], constant[7], constant[18]]]
call[name[R], parameter[name[x], constant[1], constant[0], constant[3], constant[7]]]
call[name[R], parameter[name[x], constant[2], constant[1], constant[0], constant[9]]]
call[name[R], parameter[name[x], constant[3], constant[2], constant[1], constant[13]]]
call[name[R], parameter[name[x], constant[0], constant[3], constant[2], constant[18]]]
call[name[R], parameter[name[x], constant[6], constant[5], constant[4], constant[7]]]
call[name[R], parameter[name[x], constant[7], constant[6], constant[5], constant[9]]]
call[name[R], parameter[name[x], constant[4], constant[7], constant[6], constant[13]]]
call[name[R], parameter[name[x], constant[5], constant[4], constant[7], constant[18]]]
call[name[R], parameter[name[x], constant[11], constant[10], constant[9], constant[7]]]
call[name[R], parameter[name[x], constant[8], constant[11], constant[10], constant[9]]]
call[name[R], parameter[name[x], constant[9], constant[8], constant[11], constant[13]]]
call[name[R], parameter[name[x], constant[10], constant[9], constant[8], constant[18]]]
call[name[R], parameter[name[x], constant[12], constant[15], constant[14], constant[7]]]
call[name[R], parameter[name[x], constant[13], constant[12], constant[15], constant[9]]]
call[name[R], parameter[name[x], constant[14], constant[13], constant[12], constant[13]]]
call[name[R], parameter[name[x], constant[15], constant[14], constant[13], constant[18]]]
for taget[name[i]] in starred[call[name[xrange], parameter[constant[16]]]] begin[:]
call[name[dest]][binary_operation[name[d_start] + name[i]]] assign[=] binary_operation[binary_operation[call[name[x]][name[i]] + call[name[B]][name[i]]] <ast.BitAnd object at 0x7da2590d6b60> constant[4294967295]] | keyword[def] identifier[salsa20_8] ( identifier[B] , identifier[x] , identifier[src] , identifier[s_start] , identifier[dest] , identifier[d_start] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[xrange] ( literal[int] ):
identifier[x] [ identifier[i] ]= identifier[B] [ identifier[i] ]= identifier[B] [ identifier[i] ]^ identifier[src] [ identifier[s_start] + identifier[i] ]
keyword[for] identifier[i] keyword[in] identifier[xrange] ( literal[int] ):
identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] )
identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] )
identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] )
identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] )
identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] )
identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] )
identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] )
identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] ); identifier[R] ( identifier[x] , literal[int] , literal[int] , literal[int] , literal[int] )
keyword[for] identifier[i] keyword[in] identifier[xrange] ( literal[int] ):
identifier[dest] [ identifier[d_start] + identifier[i] ]= identifier[B] [ identifier[i] ]=( identifier[x] [ identifier[i] ]+ identifier[B] [ identifier[i] ])& literal[int] | def salsa20_8(B, x, src, s_start, dest, d_start):
"""Salsa20/8 http://en.wikipedia.org/wiki/Salsa20"""
# Merged blockxor for speed
for i in xrange(16):
x[i] = B[i] = B[i] ^ src[s_start + i] # depends on [control=['for'], data=['i']]
# This is the actual Salsa 20/8: four identical double rounds
for i in xrange(4):
R(x, 4, 0, 12, 7)
R(x, 8, 4, 0, 9)
R(x, 12, 8, 4, 13)
R(x, 0, 12, 8, 18)
R(x, 9, 5, 1, 7)
R(x, 13, 9, 5, 9)
R(x, 1, 13, 9, 13)
R(x, 5, 1, 13, 18)
R(x, 14, 10, 6, 7)
R(x, 2, 14, 10, 9)
R(x, 6, 2, 14, 13)
R(x, 10, 6, 2, 18)
R(x, 3, 15, 11, 7)
R(x, 7, 3, 15, 9)
R(x, 11, 7, 3, 13)
R(x, 15, 11, 7, 18)
R(x, 1, 0, 3, 7)
R(x, 2, 1, 0, 9)
R(x, 3, 2, 1, 13)
R(x, 0, 3, 2, 18)
R(x, 6, 5, 4, 7)
R(x, 7, 6, 5, 9)
R(x, 4, 7, 6, 13)
R(x, 5, 4, 7, 18)
R(x, 11, 10, 9, 7)
R(x, 8, 11, 10, 9)
R(x, 9, 8, 11, 13)
R(x, 10, 9, 8, 18)
R(x, 12, 15, 14, 7)
R(x, 13, 12, 15, 9)
R(x, 14, 13, 12, 13)
R(x, 15, 14, 13, 18) # depends on [control=['for'], data=[]]
# While we are handling the data, write it to the correct dest.
# The latter half is still part of salsa20
for i in xrange(16):
dest[d_start + i] = B[i] = x[i] + B[i] & 4294967295 # depends on [control=['for'], data=['i']] |
def toDF(self, *cols):
"""Returns a new class:`DataFrame` that with new specified column names
:param cols: list of new column names (string)
>>> df.toDF('f1', 'f2').collect()
[Row(f1=2, f2=u'Alice'), Row(f1=5, f2=u'Bob')]
"""
jdf = self._jdf.toDF(self._jseq(cols))
return DataFrame(jdf, self.sql_ctx) | def function[toDF, parameter[self]]:
constant[Returns a new class:`DataFrame` that with new specified column names
:param cols: list of new column names (string)
>>> df.toDF('f1', 'f2').collect()
[Row(f1=2, f2=u'Alice'), Row(f1=5, f2=u'Bob')]
]
variable[jdf] assign[=] call[name[self]._jdf.toDF, parameter[call[name[self]._jseq, parameter[name[cols]]]]]
return[call[name[DataFrame], parameter[name[jdf], name[self].sql_ctx]]] | keyword[def] identifier[toDF] ( identifier[self] ,* identifier[cols] ):
literal[string]
identifier[jdf] = identifier[self] . identifier[_jdf] . identifier[toDF] ( identifier[self] . identifier[_jseq] ( identifier[cols] ))
keyword[return] identifier[DataFrame] ( identifier[jdf] , identifier[self] . identifier[sql_ctx] ) | def toDF(self, *cols):
"""Returns a new class:`DataFrame` that with new specified column names
:param cols: list of new column names (string)
>>> df.toDF('f1', 'f2').collect()
[Row(f1=2, f2=u'Alice'), Row(f1=5, f2=u'Bob')]
"""
jdf = self._jdf.toDF(self._jseq(cols))
return DataFrame(jdf, self.sql_ctx) |
def get_incidents(self):
"""Get incidents."""
resp = requests.get(CRIME_URL, params=self._get_params(), headers=self.headers)
incidents = [] # type: List[Dict[str, str]]
data = resp.json()
if ATTR_CRIMES not in data:
return incidents
for incident in data.get(ATTR_CRIMES):
if _validate_incident_date_range(incident, self.days):
if _incident_in_types(incident, self.incident_types):
incidents.append(_incident_transform(incident))
return incidents | def function[get_incidents, parameter[self]]:
constant[Get incidents.]
variable[resp] assign[=] call[name[requests].get, parameter[name[CRIME_URL]]]
variable[incidents] assign[=] list[[]]
variable[data] assign[=] call[name[resp].json, parameter[]]
if compare[name[ATTR_CRIMES] <ast.NotIn object at 0x7da2590d7190> name[data]] begin[:]
return[name[incidents]]
for taget[name[incident]] in starred[call[name[data].get, parameter[name[ATTR_CRIMES]]]] begin[:]
if call[name[_validate_incident_date_range], parameter[name[incident], name[self].days]] begin[:]
if call[name[_incident_in_types], parameter[name[incident], name[self].incident_types]] begin[:]
call[name[incidents].append, parameter[call[name[_incident_transform], parameter[name[incident]]]]]
return[name[incidents]] | keyword[def] identifier[get_incidents] ( identifier[self] ):
literal[string]
identifier[resp] = identifier[requests] . identifier[get] ( identifier[CRIME_URL] , identifier[params] = identifier[self] . identifier[_get_params] (), identifier[headers] = identifier[self] . identifier[headers] )
identifier[incidents] =[]
identifier[data] = identifier[resp] . identifier[json] ()
keyword[if] identifier[ATTR_CRIMES] keyword[not] keyword[in] identifier[data] :
keyword[return] identifier[incidents]
keyword[for] identifier[incident] keyword[in] identifier[data] . identifier[get] ( identifier[ATTR_CRIMES] ):
keyword[if] identifier[_validate_incident_date_range] ( identifier[incident] , identifier[self] . identifier[days] ):
keyword[if] identifier[_incident_in_types] ( identifier[incident] , identifier[self] . identifier[incident_types] ):
identifier[incidents] . identifier[append] ( identifier[_incident_transform] ( identifier[incident] ))
keyword[return] identifier[incidents] | def get_incidents(self):
"""Get incidents."""
resp = requests.get(CRIME_URL, params=self._get_params(), headers=self.headers)
incidents = [] # type: List[Dict[str, str]]
data = resp.json()
if ATTR_CRIMES not in data:
return incidents # depends on [control=['if'], data=[]]
for incident in data.get(ATTR_CRIMES):
if _validate_incident_date_range(incident, self.days):
if _incident_in_types(incident, self.incident_types):
incidents.append(_incident_transform(incident)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['incident']]
return incidents |
def read_from_ndef_service(self, *blocks):
"""Read block data from an NDEF compatible tag.
This is a convinience method to read block data from a tag
that has system code 0x12FC (NDEF). For other tags this method
simply returns :const:`None`. All arguments are block numbers
to read. To actually pass a list of block numbers requires
unpacking. The following example calls would have the same
effect of reading 32 byte data from from blocks 1 and 8.::
data = tag.read_from_ndef_service(1, 8)
data = tag.read_from_ndef_service(*list(1, 8))
Command execution errors raise :exc:`~nfc.tag.TagCommandError`.
"""
if self.sys == 0x12FC:
sc_list = [ServiceCode(0, 0b001011)]
bc_list = [BlockCode(n) for n in blocks]
return self.read_without_encryption(sc_list, bc_list) | def function[read_from_ndef_service, parameter[self]]:
constant[Read block data from an NDEF compatible tag.
This is a convinience method to read block data from a tag
that has system code 0x12FC (NDEF). For other tags this method
simply returns :const:`None`. All arguments are block numbers
to read. To actually pass a list of block numbers requires
unpacking. The following example calls would have the same
effect of reading 32 byte data from from blocks 1 and 8.::
data = tag.read_from_ndef_service(1, 8)
data = tag.read_from_ndef_service(*list(1, 8))
Command execution errors raise :exc:`~nfc.tag.TagCommandError`.
]
if compare[name[self].sys equal[==] constant[4860]] begin[:]
variable[sc_list] assign[=] list[[<ast.Call object at 0x7da2045660e0>]]
variable[bc_list] assign[=] <ast.ListComp object at 0x7da2045643d0>
return[call[name[self].read_without_encryption, parameter[name[sc_list], name[bc_list]]]] | keyword[def] identifier[read_from_ndef_service] ( identifier[self] ,* identifier[blocks] ):
literal[string]
keyword[if] identifier[self] . identifier[sys] == literal[int] :
identifier[sc_list] =[ identifier[ServiceCode] ( literal[int] , literal[int] )]
identifier[bc_list] =[ identifier[BlockCode] ( identifier[n] ) keyword[for] identifier[n] keyword[in] identifier[blocks] ]
keyword[return] identifier[self] . identifier[read_without_encryption] ( identifier[sc_list] , identifier[bc_list] ) | def read_from_ndef_service(self, *blocks):
"""Read block data from an NDEF compatible tag.
This is a convinience method to read block data from a tag
that has system code 0x12FC (NDEF). For other tags this method
simply returns :const:`None`. All arguments are block numbers
to read. To actually pass a list of block numbers requires
unpacking. The following example calls would have the same
effect of reading 32 byte data from from blocks 1 and 8.::
data = tag.read_from_ndef_service(1, 8)
data = tag.read_from_ndef_service(*list(1, 8))
Command execution errors raise :exc:`~nfc.tag.TagCommandError`.
"""
if self.sys == 4860:
sc_list = [ServiceCode(0, 11)]
bc_list = [BlockCode(n) for n in blocks]
return self.read_without_encryption(sc_list, bc_list) # depends on [control=['if'], data=[]] |
def has_quantity(self, quantity, include_native=True):
    """
    Check if *quantity* is available in this catalog

    Parameters
    ----------
    quantity : str
        a quantity name to check
    include_native : bool, optional
        whether or not to include native quantity names when checking

    Returns
    -------
    has_quantity : bool
        True if the quantities are all available; otherwise False
    """
    if not include_native:
        # Only quantities defined via modifiers count in this mode.
        return quantity in self._quantity_modifiers
    # Resolve the requested name to its native name(s) first, then require
    # every resolved name to be present among the native quantities.
    translated = self._translate_quantities({quantity})
    return all(name in self._native_quantities for name in translated)
constant[
Check if *quantity* is available in this catalog
Parameters
----------
quantity : str
a quantity name to check
include_native : bool, optional
whether or not to include native quantity names when checking
Returns
-------
has_quantity : bool
True if the quantities are all available; otherwise False
]
if name[include_native] begin[:]
return[call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b23904c0>]]]
return[compare[name[quantity] in name[self]._quantity_modifiers]] | keyword[def] identifier[has_quantity] ( identifier[self] , identifier[quantity] , identifier[include_native] = keyword[True] ):
literal[string]
keyword[if] identifier[include_native] :
keyword[return] identifier[all] ( identifier[q] keyword[in] identifier[self] . identifier[_native_quantities] keyword[for] identifier[q] keyword[in] identifier[self] . identifier[_translate_quantities] ({ identifier[quantity] }))
keyword[return] identifier[quantity] keyword[in] identifier[self] . identifier[_quantity_modifiers] | def has_quantity(self, quantity, include_native=True):
"""
Check if *quantity* is available in this catalog
Parameters
----------
quantity : str
a quantity name to check
include_native : bool, optional
whether or not to include native quantity names when checking
Returns
-------
has_quantity : bool
True if the quantities are all available; otherwise False
"""
if include_native:
return all((q in self._native_quantities for q in self._translate_quantities({quantity}))) # depends on [control=['if'], data=[]]
return quantity in self._quantity_modifiers |
def plot(self, event_names, x_axis='step'):
    """Plots a list of events, drawing one line per (event, directory) pair.

    Args:
      event_names: A list of events to plot. Each event_name may correspond
        to multiple events, each in a different directory.
      x_axis: whether to use step or time as x axis.
    """
    # Accept a single event name as a convenience.
    if isinstance(event_names, six.string_types):
        event_names = [event_names]
    for name, per_dir_events in zip(event_names, self.get_events(event_names)):
        for directory, frame in six.iteritems(per_dir_events):
            if x_axis == 'step':
                xs = frame['step']
            else:
                xs = frame['time']
            plt.plot(xs, frame['value'], label=name + ':' + directory)
    plt.legend(loc='best')
    plt.show()
constant[Plots a list of events. Each event (a dir+event_name) is represetented as a line
in the graph.
Args:
event_names: A list of events to plot. Each event_name may correspond to multiple events,
each in a different directory.
x_axis: whether to use step or time as x axis.
]
if call[name[isinstance], parameter[name[event_names], name[six].string_types]] begin[:]
variable[event_names] assign[=] list[[<ast.Name object at 0x7da18bc705b0>]]
variable[events_list] assign[=] call[name[self].get_events, parameter[name[event_names]]]
for taget[tuple[[<ast.Name object at 0x7da18bc710c0>, <ast.Name object at 0x7da18bc70430>]]] in starred[call[name[zip], parameter[name[event_names], name[events_list]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18ede78e0>, <ast.Name object at 0x7da18ede7160>]]] in starred[call[name[six].iteritems, parameter[name[dir_event_dict]]]] begin[:]
variable[label] assign[=] binary_operation[binary_operation[name[event_name] + constant[:]] + name[dir]]
variable[x_column] assign[=] <ast.IfExp object at 0x7da18ede7430>
call[name[plt].plot, parameter[name[x_column], call[name[df]][constant[value]]]]
call[name[plt].legend, parameter[]]
call[name[plt].show, parameter[]] | keyword[def] identifier[plot] ( identifier[self] , identifier[event_names] , identifier[x_axis] = literal[string] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[event_names] , identifier[six] . identifier[string_types] ):
identifier[event_names] =[ identifier[event_names] ]
identifier[events_list] = identifier[self] . identifier[get_events] ( identifier[event_names] )
keyword[for] identifier[event_name] , identifier[dir_event_dict] keyword[in] identifier[zip] ( identifier[event_names] , identifier[events_list] ):
keyword[for] identifier[dir] , identifier[df] keyword[in] identifier[six] . identifier[iteritems] ( identifier[dir_event_dict] ):
identifier[label] = identifier[event_name] + literal[string] + identifier[dir]
identifier[x_column] = identifier[df] [ literal[string] ] keyword[if] identifier[x_axis] == literal[string] keyword[else] identifier[df] [ literal[string] ]
identifier[plt] . identifier[plot] ( identifier[x_column] , identifier[df] [ literal[string] ], identifier[label] = identifier[label] )
identifier[plt] . identifier[legend] ( identifier[loc] = literal[string] )
identifier[plt] . identifier[show] () | def plot(self, event_names, x_axis='step'):
"""Plots a list of events. Each event (a dir+event_name) is represetented as a line
in the graph.
Args:
event_names: A list of events to plot. Each event_name may correspond to multiple events,
each in a different directory.
x_axis: whether to use step or time as x axis.
"""
if isinstance(event_names, six.string_types):
event_names = [event_names] # depends on [control=['if'], data=[]]
events_list = self.get_events(event_names)
for (event_name, dir_event_dict) in zip(event_names, events_list):
for (dir, df) in six.iteritems(dir_event_dict):
label = event_name + ':' + dir
x_column = df['step'] if x_axis == 'step' else df['time']
plt.plot(x_column, df['value'], label=label) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
plt.legend(loc='best')
plt.show() |
def port_profile_qos_profile_cee(self, **kwargs):
    """Auto Generated Code

    Builds the <config>/<port-profile>/<qos-profile>/<cee> XML payload from
    the ``name`` and ``cee`` keyword arguments and hands it to ``callback``
    (defaulting to this object's RPC callback).
    """
    config = ET.Element("config")
    profile = ET.SubElement(config, "port-profile",
                            xmlns="urn:brocade.com:mgmt:brocade-port-profile")
    ET.SubElement(profile, "name").text = kwargs.pop('name')
    qos = ET.SubElement(profile, "qos-profile")
    ET.SubElement(qos, "cee").text = kwargs.pop('cee')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[port_profile] assign[=] call[name[ET].SubElement, parameter[name[config], constant[port-profile]]]
variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[port_profile], constant[name]]]
name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]]
variable[qos_profile] assign[=] call[name[ET].SubElement, parameter[name[port_profile], constant[qos-profile]]]
variable[cee] assign[=] call[name[ET].SubElement, parameter[name[qos_profile], constant[cee]]]
name[cee].text assign[=] call[name[kwargs].pop, parameter[constant[cee]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[port_profile_qos_profile_cee] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[port_profile] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[port_profile] , literal[string] )
identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[qos_profile] = identifier[ET] . identifier[SubElement] ( identifier[port_profile] , literal[string] )
identifier[cee] = identifier[ET] . identifier[SubElement] ( identifier[qos_profile] , literal[string] )
identifier[cee] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def port_profile_qos_profile_cee(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
port_profile = ET.SubElement(config, 'port-profile', xmlns='urn:brocade.com:mgmt:brocade-port-profile')
name_key = ET.SubElement(port_profile, 'name')
name_key.text = kwargs.pop('name')
qos_profile = ET.SubElement(port_profile, 'qos-profile')
cee = ET.SubElement(qos_profile, 'cee')
cee.text = kwargs.pop('cee')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def create_datastore_from_yaml_schema(self, yaml_path, delete_first=0,
                                      path=None):
    # type: (str, Optional[int], Optional[str]) -> None
    """For tabular data, create a resource in the HDX datastore which enables data preview in HDX from a YAML file
    containing a list of fields and types of form {'id': 'FIELD', 'type': 'TYPE'} and optionally a primary key.
    If path is not supplied, the file is first downloaded from HDX.

    Args:
        yaml_path (str): Path to YAML file containing list of fields and types of form {'id': 'FIELD', 'type': 'TYPE'}
        delete_first (int): Delete datastore before creation. 0 = No, 1 = Yes, 2 = If no primary key. Defaults to 0.
        path (Optional[str]): Local path to file that was uploaded. Defaults to None.

    Returns:
        None
    """
    # Parse the YAML schema, then delegate to the dict-based implementation.
    schema = load_yaml(yaml_path)
    self.create_datastore_from_dict_schema(schema, delete_first, path=path)
constant[For tabular data, create a resource in the HDX datastore which enables data preview in HDX from a YAML file
containing a list of fields and types of form {'id': 'FIELD', 'type': 'TYPE'} and optionally a primary key.
If path is not supplied, the file is first downloaded from HDX.
Args:
yaml_path (str): Path to YAML file containing list of fields and types of form {'id': 'FIELD', 'type': 'TYPE'}
delete_first (int): Delete datastore before creation. 0 = No, 1 = Yes, 2 = If no primary key. Defaults to 0.
path (Optional[str]): Local path to file that was uploaded. Defaults to None.
Returns:
None
]
variable[data] assign[=] call[name[load_yaml], parameter[name[yaml_path]]]
call[name[self].create_datastore_from_dict_schema, parameter[name[data], name[delete_first]]] | keyword[def] identifier[create_datastore_from_yaml_schema] ( identifier[self] , identifier[yaml_path] , identifier[delete_first] = literal[int] ,
identifier[path] = keyword[None] ):
literal[string]
identifier[data] = identifier[load_yaml] ( identifier[yaml_path] )
identifier[self] . identifier[create_datastore_from_dict_schema] ( identifier[data] , identifier[delete_first] , identifier[path] = identifier[path] ) | def create_datastore_from_yaml_schema(self, yaml_path, delete_first=0, path=None):
# type: (str, Optional[int], Optional[str]) -> None
"For tabular data, create a resource in the HDX datastore which enables data preview in HDX from a YAML file\n containing a list of fields and types of form {'id': 'FIELD', 'type': 'TYPE'} and optionally a primary key.\n If path is not supplied, the file is first downloaded from HDX.\n\n Args:\n yaml_path (str): Path to YAML file containing list of fields and types of form {'id': 'FIELD', 'type': 'TYPE'}\n delete_first (int): Delete datastore before creation. 0 = No, 1 = Yes, 2 = If no primary key. Defaults to 0.\n path (Optional[str]): Local path to file that was uploaded. Defaults to None.\n\n Returns:\n None\n "
data = load_yaml(yaml_path)
self.create_datastore_from_dict_schema(data, delete_first, path=path) |
def _set_get_mpls_ldp_neighbor_brief(self, v, load=False):
    """
    Setter method for get_mpls_ldp_neighbor_brief, mapped from YANG variable
    /brocade_mpls_rpc/get_mpls_ldp_neighbor_brief (rpc).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_get_mpls_ldp_neighbor_brief is considered as a private method.
    Backends looking to populate this variable should do so via calling
    thisObj._set_get_mpls_ldp_neighbor_brief() directly.
    """
    # Unwrap values that already carry a pybind underlying type.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the value so YANG typing, REST naming and tailf
        # extensions are enforced on assignment.
        wrapped = YANGDynClass(
            v,
            base=get_mpls_ldp_neighbor_brief.get_mpls_ldp_neighbor_brief,
            is_leaf=True,
            yang_name="get-mpls-ldp-neighbor-brief",
            rest_name="get-mpls-ldp-neighbor-brief",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=False,
            extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsLdpNeighbor'}},
            namespace='urn:brocade.com:mgmt:brocade-mpls',
            defining_module='brocade-mpls',
            yang_type='rpc',
            is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError({
            'error-string': """get_mpls_ldp_neighbor_brief must be of a type compatible with rpc""",
            'defined-type': "rpc",
            'generated-type': """YANGDynClass(base=get_mpls_ldp_neighbor_brief.get_mpls_ldp_neighbor_brief, is_leaf=True, yang_name="get-mpls-ldp-neighbor-brief", rest_name="get-mpls-ldp-neighbor-brief", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsLdpNeighbor'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)""",
        })
    self.__get_mpls_ldp_neighbor_brief = wrapped
    # Run the post-assignment hook when the backend provides one.
    if hasattr(self, '_set'):
        self._set()
constant[
Setter method for get_mpls_ldp_neighbor_brief, mapped from YANG variable /brocade_mpls_rpc/get_mpls_ldp_neighbor_brief (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_get_mpls_ldp_neighbor_brief is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_get_mpls_ldp_neighbor_brief() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da2044c12d0>
name[self].__get_mpls_ldp_neighbor_brief assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_get_mpls_ldp_neighbor_brief] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[get_mpls_ldp_neighbor_brief] . identifier[get_mpls_ldp_neighbor_brief] , identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[False] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__get_mpls_ldp_neighbor_brief] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_get_mpls_ldp_neighbor_brief(self, v, load=False):
"""
Setter method for get_mpls_ldp_neighbor_brief, mapped from YANG variable /brocade_mpls_rpc/get_mpls_ldp_neighbor_brief (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_get_mpls_ldp_neighbor_brief is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_get_mpls_ldp_neighbor_brief() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=get_mpls_ldp_neighbor_brief.get_mpls_ldp_neighbor_brief, is_leaf=True, yang_name='get-mpls-ldp-neighbor-brief', rest_name='get-mpls-ldp-neighbor-brief', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsLdpNeighbor'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'get_mpls_ldp_neighbor_brief must be of a type compatible with rpc', 'defined-type': 'rpc', 'generated-type': 'YANGDynClass(base=get_mpls_ldp_neighbor_brief.get_mpls_ldp_neighbor_brief, is_leaf=True, yang_name="get-mpls-ldp-neighbor-brief", rest_name="get-mpls-ldp-neighbor-brief", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u\'tailf-common\': {u\'hidden\': u\'full\', u\'actionpoint\': u\'showMplsLdpNeighbor\'}}, namespace=\'urn:brocade.com:mgmt:brocade-mpls\', defining_module=\'brocade-mpls\', yang_type=\'rpc\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__get_mpls_ldp_neighbor_brief = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def keys(self):
    """
    :returns: a list of usable keys
    :rtype: list
    """
    # Keep every member that is neither dunder-named nor a bound method;
    # getmembers() returns (name, value) pairs sorted by name.
    return [
        name
        for name, member in inspect.getmembers(self)
        if not name.startswith('__') and not inspect.ismethod(member)
    ]
constant[
:returns: a list of usable keys
:rtype: list
]
variable[keys] assign[=] call[name[list], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b0bf2740>, <ast.Name object at 0x7da1b0bf3430>]]] in starred[call[name[inspect].getmembers, parameter[name[self]]]] begin[:]
if <ast.BoolOp object at 0x7da1b0a1de10> begin[:]
continue
call[name[keys].append, parameter[name[attribute_name]]]
return[name[keys]] | keyword[def] identifier[keys] ( identifier[self] ):
literal[string]
identifier[keys] = identifier[list] ()
keyword[for] identifier[attribute_name] , identifier[type_instance] keyword[in] identifier[inspect] . identifier[getmembers] ( identifier[self] ):
keyword[if] identifier[attribute_name] . identifier[startswith] ( literal[string] ) keyword[or] identifier[inspect] . identifier[ismethod] ( identifier[type_instance] ):
keyword[continue]
identifier[keys] . identifier[append] ( identifier[attribute_name] )
keyword[return] identifier[keys] | def keys(self):
"""
:returns: a list of usable keys
:rtype: list
"""
keys = list()
for (attribute_name, type_instance) in inspect.getmembers(self):
# ignore parameters with __ and if they are methods
if attribute_name.startswith('__') or inspect.ismethod(type_instance):
continue # depends on [control=['if'], data=[]]
keys.append(attribute_name) # depends on [control=['for'], data=[]]
return keys |
def get_random_voxels(dataset, n_voxels):
    """ Returns mappable data for a random subset of voxels.

    May be useful as a baseline in predictive analyses--e.g., to compare
    performance of a more principled feature selection method with simple
    random selection.

    Args:
        dataset: A Dataset instance
        n_voxels: An integer specifying the number of random voxels to select.

    Returns:
        A 2D numpy array with (randomly-selected) voxels in rows and mappables
        in columns.
    """
    candidates = np.arange(dataset.masker.n_vox_in_vol)
    np.random.shuffle(candidates)  # in-place permutation via the global RNG
    return dataset.get_image_data(voxels=candidates[:n_voxels])
constant[ Returns mappable data for a random subset of voxels.
May be useful as a baseline in predictive analyses--e.g., to compare
performance of a more principled feature selection method with simple
random selection.
Args:
dataset: A Dataset instance
n_voxels: An integer specifying the number of random voxels to select.
Returns:
A 2D numpy array with (randomly-selected) voxels in rows and mappables
in columns.
]
variable[voxels] assign[=] call[name[np].arange, parameter[name[dataset].masker.n_vox_in_vol]]
call[name[np].random.shuffle, parameter[name[voxels]]]
variable[selected] assign[=] call[name[voxels]][<ast.Slice object at 0x7da20e956e30>]
return[call[name[dataset].get_image_data, parameter[]]] | keyword[def] identifier[get_random_voxels] ( identifier[dataset] , identifier[n_voxels] ):
literal[string]
identifier[voxels] = identifier[np] . identifier[arange] ( identifier[dataset] . identifier[masker] . identifier[n_vox_in_vol] )
identifier[np] . identifier[random] . identifier[shuffle] ( identifier[voxels] )
identifier[selected] = identifier[voxels] [ literal[int] : identifier[n_voxels] ]
keyword[return] identifier[dataset] . identifier[get_image_data] ( identifier[voxels] = identifier[selected] ) | def get_random_voxels(dataset, n_voxels):
""" Returns mappable data for a random subset of voxels.
May be useful as a baseline in predictive analyses--e.g., to compare
performance of a more principled feature selection method with simple
random selection.
Args:
dataset: A Dataset instance
n_voxels: An integer specifying the number of random voxels to select.
Returns:
A 2D numpy array with (randomly-selected) voxels in rows and mappables
in columns.
"""
voxels = np.arange(dataset.masker.n_vox_in_vol)
np.random.shuffle(voxels)
selected = voxels[0:n_voxels]
return dataset.get_image_data(voxels=selected) |
def kill_process(process):
    """Interrupt the given process and wait for it to exit. (posix)

    Sends SIGINT to the process itself (note: not to its process group,
    despite what the old docstring claimed -- ``os.kill`` targets a single
    pid) so the Xenon-GRPC server can shut down cleanly, then blocks until
    it has terminated.

    Args:
        process: An object exposing a ``pid`` attribute and a ``wait()``
            method, e.g. a ``subprocess.Popen`` instance.
    """
    logger = logging.getLogger('xenon')
    logger.info('Terminating Xenon-GRPC server.')
    os.kill(process.pid, signal.SIGINT)
    process.wait()
constant[Kill the process group associated with the given process. (posix)]
variable[logger] assign[=] call[name[logging].getLogger, parameter[constant[xenon]]]
call[name[logger].info, parameter[constant[Terminating Xenon-GRPC server.]]]
call[name[os].kill, parameter[name[process].pid, name[signal].SIGINT]]
call[name[process].wait, parameter[]] | keyword[def] identifier[kill_process] ( identifier[process] ):
literal[string]
identifier[logger] = identifier[logging] . identifier[getLogger] ( literal[string] )
identifier[logger] . identifier[info] ( literal[string] )
identifier[os] . identifier[kill] ( identifier[process] . identifier[pid] , identifier[signal] . identifier[SIGINT] )
identifier[process] . identifier[wait] () | def kill_process(process):
"""Kill the process group associated with the given process. (posix)"""
logger = logging.getLogger('xenon')
logger.info('Terminating Xenon-GRPC server.')
os.kill(process.pid, signal.SIGINT)
process.wait() |
def delete(self, list_id, segment_id):
    """
    removes an existing list segment from the list. This cannot be undone.
    """
    # Build .../<list_id>/segments/<segment_id> and issue the DELETE.
    url = self._build_path(list_id, 'segments', segment_id)
    return self._mc_client._delete(url=url)
constant[
removes an existing list segment from the list. This cannot be undone.
]
return[call[name[self]._mc_client._delete, parameter[]]] | keyword[def] identifier[delete] ( identifier[self] , identifier[list_id] , identifier[segment_id] ):
literal[string]
keyword[return] identifier[self] . identifier[_mc_client] . identifier[_delete] ( identifier[url] = identifier[self] . identifier[_build_path] ( identifier[list_id] , literal[string] , identifier[segment_id] )) | def delete(self, list_id, segment_id):
"""
removes an existing list segment from the list. This cannot be undone.
"""
return self._mc_client._delete(url=self._build_path(list_id, 'segments', segment_id)) |
def namespace_match(pattern: str, namespace: str):
    """
    Matches a namespace pattern against a namespace string. For example, ``*tags`` matches
    ``passage_tags`` and ``question_tags`` and ``tokens`` matches ``tokens`` but not
    ``stemmed_tokens``.
    """
    # A leading '*' makes the remainder a suffix pattern.
    is_wildcard = pattern[0] == '*'
    if is_wildcard and namespace.endswith(pattern[1:]):
        return True
    # Otherwise only an exact match counts.
    return pattern == namespace
constant[
Matches a namespace pattern against a namespace string. For example, ``*tags`` matches
``passage_tags`` and ``question_tags`` and ``tokens`` matches ``tokens`` but not
``stemmed_tokens``.
]
if <ast.BoolOp object at 0x7da20c76cfd0> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[namespace_match] ( identifier[pattern] : identifier[str] , identifier[namespace] : identifier[str] ):
literal[string]
keyword[if] identifier[pattern] [ literal[int] ]== literal[string] keyword[and] identifier[namespace] . identifier[endswith] ( identifier[pattern] [ literal[int] :]):
keyword[return] keyword[True]
keyword[elif] identifier[pattern] == identifier[namespace] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def namespace_match(pattern: str, namespace: str):
"""
Matches a namespace pattern against a namespace string. For example, ``*tags`` matches
``passage_tags`` and ``question_tags`` and ``tokens`` matches ``tokens`` but not
``stemmed_tokens``.
"""
if pattern[0] == '*' and namespace.endswith(pattern[1:]):
return True # depends on [control=['if'], data=[]]
elif pattern == namespace:
return True # depends on [control=['if'], data=[]]
return False |
def data(self, c):
    """Send byte of data to display."""
    if self._spi is None:
        # I2C write: 0x40 control byte marks the following byte as data.
        self._i2c.write8(0x40, c)
    else:
        # SPI write: raise D/C to select data mode, then shift the byte out.
        self._gpio.set_high(self._dc)
        self._spi.write([c])
constant[Send byte of data to display.]
if compare[name[self]._spi is_not constant[None]] begin[:]
call[name[self]._gpio.set_high, parameter[name[self]._dc]]
call[name[self]._spi.write, parameter[list[[<ast.Name object at 0x7da1b1d35900>]]]] | keyword[def] identifier[data] ( identifier[self] , identifier[c] ):
literal[string]
keyword[if] identifier[self] . identifier[_spi] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_gpio] . identifier[set_high] ( identifier[self] . identifier[_dc] )
identifier[self] . identifier[_spi] . identifier[write] ([ identifier[c] ])
keyword[else] :
identifier[control] = literal[int]
identifier[self] . identifier[_i2c] . identifier[write8] ( identifier[control] , identifier[c] ) | def data(self, c):
"""Send byte of data to display."""
if self._spi is not None:
# SPI write.
self._gpio.set_high(self._dc)
self._spi.write([c]) # depends on [control=['if'], data=[]]
else:
# I2C write.
control = 64 # Co = 0, DC = 0
self._i2c.write8(control, c) |
def _extract_variable_parts(variable_key, variable):
"""Matches a variable to individual parts.
Args:
variable_key: String identifier of the variable in the module scope.
variable: Variable tensor.
Returns:
partitioned: Whether the variable is partitioned.
name: Name of the variable up to the partitioning.
offset: Offset of the variable into the full variable.
Raises:
RuntimeError: In case of unexpected variable format.
"""
name, offset, partitioned = None, None, False
# pylint: disable=protected-access
if variable._save_slice_info:
name = variable_key[:variable_key.rfind("/")]
if not variable._save_slice_info.full_name.endswith(name):
raise RuntimeError("Unexpected handling of partitioned variable.")
offset = variable._save_slice_info.var_offset[0]
partitioned = True
# pylint: enable=protected-access
return partitioned, name, offset | def function[_extract_variable_parts, parameter[variable_key, variable]]:
constant[Matches a variable to individual parts.
Args:
variable_key: String identifier of the variable in the module scope.
variable: Variable tensor.
Returns:
partitioned: Whether the variable is partitioned.
name: Name of the variable up to the partitioning.
offset: Offset of the variable into the full variable.
Raises:
RuntimeError: In case of unexpected variable format.
]
<ast.Tuple object at 0x7da1b20bb400> assign[=] tuple[[<ast.Constant object at 0x7da1b20b8580>, <ast.Constant object at 0x7da1b20bb340>, <ast.Constant object at 0x7da1b20b8790>]]
if name[variable]._save_slice_info begin[:]
variable[name] assign[=] call[name[variable_key]][<ast.Slice object at 0x7da1b20bb7f0>]
if <ast.UnaryOp object at 0x7da1b20bbeb0> begin[:]
<ast.Raise object at 0x7da1b20b9330>
variable[offset] assign[=] call[name[variable]._save_slice_info.var_offset][constant[0]]
variable[partitioned] assign[=] constant[True]
return[tuple[[<ast.Name object at 0x7da1b20ba860>, <ast.Name object at 0x7da1b20ba800>, <ast.Name object at 0x7da1b20b8b20>]]] | keyword[def] identifier[_extract_variable_parts] ( identifier[variable_key] , identifier[variable] ):
literal[string]
identifier[name] , identifier[offset] , identifier[partitioned] = keyword[None] , keyword[None] , keyword[False]
keyword[if] identifier[variable] . identifier[_save_slice_info] :
identifier[name] = identifier[variable_key] [: identifier[variable_key] . identifier[rfind] ( literal[string] )]
keyword[if] keyword[not] identifier[variable] . identifier[_save_slice_info] . identifier[full_name] . identifier[endswith] ( identifier[name] ):
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[offset] = identifier[variable] . identifier[_save_slice_info] . identifier[var_offset] [ literal[int] ]
identifier[partitioned] = keyword[True]
keyword[return] identifier[partitioned] , identifier[name] , identifier[offset] | def _extract_variable_parts(variable_key, variable):
"""Matches a variable to individual parts.
Args:
variable_key: String identifier of the variable in the module scope.
variable: Variable tensor.
Returns:
partitioned: Whether the variable is partitioned.
name: Name of the variable up to the partitioning.
offset: Offset of the variable into the full variable.
Raises:
RuntimeError: In case of unexpected variable format.
"""
(name, offset, partitioned) = (None, None, False)
# pylint: disable=protected-access
if variable._save_slice_info:
name = variable_key[:variable_key.rfind('/')]
if not variable._save_slice_info.full_name.endswith(name):
raise RuntimeError('Unexpected handling of partitioned variable.') # depends on [control=['if'], data=[]]
offset = variable._save_slice_info.var_offset[0]
partitioned = True # depends on [control=['if'], data=[]]
# pylint: enable=protected-access
return (partitioned, name, offset) |
def agg(self, aggregations):
    """Multiple aggregations optimized.

    Parameters
    ----------
    aggregations : list of str
        Which aggregations to perform.

    Returns
    -------
    Series
        Series with resulting aggregations.
    """
    check_type(aggregations, list)
    # Label the result rows with the aggregation names (as bytes).
    labels = np.array(aggregations, dtype=np.bytes_)
    result_index = Index(labels, np.dtype(np.bytes_))
    return _series_agg(self, aggregations, result_index)
constant[Multiple aggregations optimized.
Parameters
----------
aggregations : list of str
Which aggregations to perform.
Returns
-------
Series
Series with resulting aggregations.
]
call[name[check_type], parameter[name[aggregations], name[list]]]
variable[new_index] assign[=] call[name[Index], parameter[call[name[np].array, parameter[name[aggregations]]], call[name[np].dtype, parameter[name[np].bytes_]]]]
return[call[name[_series_agg], parameter[name[self], name[aggregations], name[new_index]]]] | keyword[def] identifier[agg] ( identifier[self] , identifier[aggregations] ):
literal[string]
identifier[check_type] ( identifier[aggregations] , identifier[list] )
identifier[new_index] = identifier[Index] ( identifier[np] . identifier[array] ( identifier[aggregations] , identifier[dtype] = identifier[np] . identifier[bytes_] ), identifier[np] . identifier[dtype] ( identifier[np] . identifier[bytes_] ))
keyword[return] identifier[_series_agg] ( identifier[self] , identifier[aggregations] , identifier[new_index] ) | def agg(self, aggregations):
"""Multiple aggregations optimized.
Parameters
----------
aggregations : list of str
Which aggregations to perform.
Returns
-------
Series
Series with resulting aggregations.
"""
check_type(aggregations, list)
new_index = Index(np.array(aggregations, dtype=np.bytes_), np.dtype(np.bytes_))
return _series_agg(self, aggregations, new_index) |
def _is_nanpa_number_with_national_prefix(self):
"""Returns true if the current country is a NANPA country and the
national number begins with the national prefix.
"""
# For NANPA numbers beginning with 1[2-9], treat the 1 as the national
# prefix. The reason is that national significant numbers in NANPA
# always start with [2-9] after the national prefix. Numbers
# beginning with 1[01] can only be short/emergency numbers, which
# don't need the national prefix.
return (self._current_metadata.country_code == 1 and self._national_number[0] == '1' and
self._national_number[1] != '0' and self._national_number[1] != '1') | def function[_is_nanpa_number_with_national_prefix, parameter[self]]:
constant[Returns true if the current country is a NANPA country and the
national number begins with the national prefix.
]
return[<ast.BoolOp object at 0x7da1b18bdbd0>] | keyword[def] identifier[_is_nanpa_number_with_national_prefix] ( identifier[self] ):
literal[string]
keyword[return] ( identifier[self] . identifier[_current_metadata] . identifier[country_code] == literal[int] keyword[and] identifier[self] . identifier[_national_number] [ literal[int] ]== literal[string] keyword[and]
identifier[self] . identifier[_national_number] [ literal[int] ]!= literal[string] keyword[and] identifier[self] . identifier[_national_number] [ literal[int] ]!= literal[string] ) | def _is_nanpa_number_with_national_prefix(self):
"""Returns true if the current country is a NANPA country and the
national number begins with the national prefix.
"""
# For NANPA numbers beginning with 1[2-9], treat the 1 as the national
# prefix. The reason is that national significant numbers in NANPA
# always start with [2-9] after the national prefix. Numbers
# beginning with 1[01] can only be short/emergency numbers, which
# don't need the national prefix.
return self._current_metadata.country_code == 1 and self._national_number[0] == '1' and (self._national_number[1] != '0') and (self._national_number[1] != '1') |
def expand_range(range, mul=0, add=0, zero_width=1):
    """
    Expand a range with a multiplicative or additive constant

    Parameters
    ----------
    range : tuple
        Range of data. Size 2.
    mul : int | float
        Multiplicative constant
    add : int | float | timedelta
        Additive constant
    zero_width : int | float | timedelta
        Distance to use if range has zero width

    Returns
    -------
    out : tuple
        Expanded range

    Examples
    --------
    >>> expand_range((3, 8))
    (3, 8)
    >>> expand_range((0, 10), mul=0.1)
    (-1.0, 11.0)
    >>> expand_range((0, 10), add=2)
    (-2, 12)
    >>> expand_range((0, 10), mul=.1, add=2)
    (-3.0, 13.0)
    >>> expand_range((0, 1))
    (0, 1)

    When the range has zero width

    >>> expand_range((5, 5))
    (4.5, 5.5)

    Notes
    -----
    If expanding *datetime* or *timedelta* types, **add** and
    **zero_width** must be suitable *timedeltas* i.e. You should
    not mix types between **Numpy**, **Pandas** and the
    :mod:`datetime` module.
    """
    # A scalar becomes the degenerate range (value, value).
    try:
        range[0]
    except TypeError:
        range = (range, range)

    if zero_range(range):
        # Zero-width range: pad symmetrically by half of zero_width.
        half = zero_width / 2
        return range[0] - half, range[0] + half

    pad = (range[1] - range[0]) * mul + add
    return range[0] - pad, range[1] + pad
constant[
Expand a range with a multiplicative or additive constant
Parameters
----------
range : tuple
Range of data. Size 2.
mul : int | float
Multiplicative constant
add : int | float | timedelta
Additive constant
zero_width : int | float | timedelta
Distance to use if range has zero width
Returns
-------
out : tuple
Expanded range
Examples
--------
>>> expand_range((3, 8))
(3, 8)
>>> expand_range((0, 10), mul=0.1)
(-1.0, 11.0)
>>> expand_range((0, 10), add=2)
(-2, 12)
>>> expand_range((0, 10), mul=.1, add=2)
(-3.0, 13.0)
>>> expand_range((0, 1))
(0, 1)
When the range has zero width
>>> expand_range((5, 5))
(4.5, 5.5)
Notes
-----
If expanding *datetime* or *timedelta* types, **add** and
**zero_width** must be suitable *timedeltas* i.e. You should
not mix types between **Numpy**, **Pandas** and the
:mod:`datetime` module.
In Python 2, you cannot multiplicative constant **mul** cannot be
a :class:`float`.
]
variable[x] assign[=] name[range]
<ast.Try object at 0x7da18dc05360>
if call[name[zero_range], parameter[name[x]]] begin[:]
variable[new] assign[=] tuple[[<ast.BinOp object at 0x7da18dc05bd0>, <ast.BinOp object at 0x7da18dc05d50>]]
return[name[new]] | keyword[def] identifier[expand_range] ( identifier[range] , identifier[mul] = literal[int] , identifier[add] = literal[int] , identifier[zero_width] = literal[int] ):
literal[string]
identifier[x] = identifier[range]
keyword[try] :
identifier[x] [ literal[int] ]
keyword[except] identifier[TypeError] :
identifier[x] =( identifier[x] , identifier[x] )
keyword[if] identifier[zero_range] ( identifier[x] ):
identifier[new] = identifier[x] [ literal[int] ]- identifier[zero_width] / literal[int] , identifier[x] [ literal[int] ]+ identifier[zero_width] / literal[int]
keyword[else] :
identifier[dx] =( identifier[x] [ literal[int] ]- identifier[x] [ literal[int] ])* identifier[mul] + identifier[add]
identifier[new] = identifier[x] [ literal[int] ]- identifier[dx] , identifier[x] [ literal[int] ]+ identifier[dx]
keyword[return] identifier[new] | def expand_range(range, mul=0, add=0, zero_width=1):
"""
Expand a range with a multiplicative or additive constant
Parameters
----------
range : tuple
Range of data. Size 2.
mul : int | float
Multiplicative constant
add : int | float | timedelta
Additive constant
zero_width : int | float | timedelta
Distance to use if range has zero width
Returns
-------
out : tuple
Expanded range
Examples
--------
>>> expand_range((3, 8))
(3, 8)
>>> expand_range((0, 10), mul=0.1)
(-1.0, 11.0)
>>> expand_range((0, 10), add=2)
(-2, 12)
>>> expand_range((0, 10), mul=.1, add=2)
(-3.0, 13.0)
>>> expand_range((0, 1))
(0, 1)
When the range has zero width
>>> expand_range((5, 5))
(4.5, 5.5)
Notes
-----
If expanding *datetime* or *timedelta* types, **add** and
**zero_width** must be suitable *timedeltas* i.e. You should
not mix types between **Numpy**, **Pandas** and the
:mod:`datetime` module.
In Python 2, you cannot multiplicative constant **mul** cannot be
a :class:`float`.
"""
x = range
# Enforce tuple
try:
x[0] # depends on [control=['try'], data=[]]
except TypeError:
x = (x, x) # depends on [control=['except'], data=[]]
# The expansion cases
if zero_range(x):
new = (x[0] - zero_width / 2, x[0] + zero_width / 2) # depends on [control=['if'], data=[]]
else:
dx = (x[1] - x[0]) * mul + add
new = (x[0] - dx, x[1] + dx)
return new |
def post_internal_command(self, int_cmd, thread_id):
    """Enqueue an internal command for *thread_id*.

    If thread_id is '*', the command is posted to the shared '*' queue.
    """
    target_queue = self.get_internal_queue(thread_id)
    target_queue.put(int_cmd)
constant[ if thread_id is *, post to the '*' queue]
variable[queue] assign[=] call[name[self].get_internal_queue, parameter[name[thread_id]]]
call[name[queue].put, parameter[name[int_cmd]]] | keyword[def] identifier[post_internal_command] ( identifier[self] , identifier[int_cmd] , identifier[thread_id] ):
literal[string]
identifier[queue] = identifier[self] . identifier[get_internal_queue] ( identifier[thread_id] )
identifier[queue] . identifier[put] ( identifier[int_cmd] ) | def post_internal_command(self, int_cmd, thread_id):
""" if thread_id is *, post to the '*' queue"""
queue = self.get_internal_queue(thread_id)
queue.put(int_cmd) |
def album(self):
    """
    album as :class:`Album` object

    Built lazily from the cached album/artist fields on first access,
    then memoized on ``self._album``.
    """
    cached = self._album
    if not cached:
        cached = Album(self._album_id, self._album_name,
                       self._artist_id, self._artist_name,
                       self._cover_url, self._connection)
        self._album = cached
    return cached
constant[
album as :class:`Album` object
]
if <ast.UnaryOp object at 0x7da20c76fa30> begin[:]
name[self]._album assign[=] call[name[Album], parameter[name[self]._album_id, name[self]._album_name, name[self]._artist_id, name[self]._artist_name, name[self]._cover_url, name[self]._connection]]
return[name[self]._album] | keyword[def] identifier[album] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_album] :
identifier[self] . identifier[_album] = identifier[Album] ( identifier[self] . identifier[_album_id] , identifier[self] . identifier[_album_name] ,
identifier[self] . identifier[_artist_id] , identifier[self] . identifier[_artist_name] ,
identifier[self] . identifier[_cover_url] , identifier[self] . identifier[_connection] )
keyword[return] identifier[self] . identifier[_album] | def album(self):
"""
album as :class:`Album` object
"""
if not self._album:
self._album = Album(self._album_id, self._album_name, self._artist_id, self._artist_name, self._cover_url, self._connection) # depends on [control=['if'], data=[]]
return self._album |
def getWindowByPID(self, pid, order=0):
    """ Returns a handle for the first window that matches the provided PID

    ``order`` skips that many earlier matches (0 = first match).
    Raises OSError when no matching window is found.
    """
    remaining = order
    for window in self._get_window_list():
        if "kCGWindowOwnerPID" in window and window["kCGWindowOwnerPID"] == pid:
            if remaining == 0:
                return window["kCGWindowNumber"]
            remaining -= 1
    # NOTE(review): like the original, the reported index is the residual
    # count after skipping matches, not the requested `order`.
    raise OSError("Could not find window for PID {} at index {}".format(pid, remaining))
constant[ Returns a handle for the first window that matches the provided PID ]
for taget[name[w]] in starred[call[name[self]._get_window_list, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da18dc9ab30> begin[:]
if compare[name[order] equal[==] constant[0]] begin[:]
return[call[name[w]][constant[kCGWindowNumber]]]
<ast.Raise object at 0x7da2041dad10> | keyword[def] identifier[getWindowByPID] ( identifier[self] , identifier[pid] , identifier[order] = literal[int] ):
literal[string]
keyword[for] identifier[w] keyword[in] identifier[self] . identifier[_get_window_list] ():
keyword[if] literal[string] keyword[in] identifier[w] keyword[and] identifier[w] [ literal[string] ]== identifier[pid] :
keyword[if] identifier[order] == literal[int] :
keyword[return] identifier[w] [ literal[string] ]
keyword[else] :
identifier[order] -= literal[int]
keyword[raise] identifier[OSError] ( literal[string] . identifier[format] ( identifier[pid] , identifier[order] )) | def getWindowByPID(self, pid, order=0):
""" Returns a handle for the first window that matches the provided PID """
for w in self._get_window_list():
if 'kCGWindowOwnerPID' in w and w['kCGWindowOwnerPID'] == pid:
# Matches - make sure we get it in the correct order
if order == 0:
return w['kCGWindowNumber'] # depends on [control=['if'], data=[]]
else:
order -= 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['w']]
raise OSError('Could not find window for PID {} at index {}'.format(pid, order)) |
def infoObject(object, cat, format, *args):
    """
    Log an informational message in the given category.

    Thin wrapper around doLog: logs at INFO level on behalf of *object*,
    forwarding *format* together with the extra positional arguments
    collected into the ``args`` tuple.
    """
    doLog(INFO, object, cat, format, args)
constant[
Log an informational message in the given category.
]
call[name[doLog], parameter[name[INFO], name[object], name[cat], name[format], name[args]]] | keyword[def] identifier[infoObject] ( identifier[object] , identifier[cat] , identifier[format] ,* identifier[args] ):
literal[string]
identifier[doLog] ( identifier[INFO] , identifier[object] , identifier[cat] , identifier[format] , identifier[args] ) | def infoObject(object, cat, format, *args):
"""
Log an informational message in the given category.
"""
doLog(INFO, object, cat, format, args) |
def dmp(self, thing):
    """Dump the content of an object in a dict for wdb.js.

    Maps each (escaped) attribute name of ``thing`` to a dict with the
    attribute's safe repr (``'val'``) and its type name (``'type'``).
    """
    def safe_getattr(key):
        """Avoid crash on getattr"""
        try:
            return getattr(thing, key)
        except Exception as e:
            return 'Error getting attr "%s" from "%s" (%s: %s)' % (
                key, thing, type(e).__name__, e
            )

    result = {}
    for key in dir(thing):
        # Fetch each attribute exactly once: getattr can run arbitrary
        # __getattr__ code, so the previous double lookup (once for
        # 'val', once for 'type') was wasteful and could even yield
        # inconsistent val/type pairs for side-effecting attributes.
        value = safe_getattr(key)
        result[escape(key)] = {
            'val': self.safe_better_repr(value),
            'type': type(value).__name__,
        }
    return result
constant[Dump the content of an object in a dict for wdb.js]
def function[safe_getattr, parameter[key]]:
constant[Avoid crash on getattr]
<ast.Try object at 0x7da204344a00>
return[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da204347f70>]]] | keyword[def] identifier[dmp] ( identifier[self] , identifier[thing] ):
literal[string]
keyword[def] identifier[safe_getattr] ( identifier[key] ):
literal[string]
keyword[try] :
keyword[return] identifier[getattr] ( identifier[thing] , identifier[key] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[return] literal[string] %(
identifier[key] , identifier[thing] , identifier[type] ( identifier[e] ). identifier[__name__] , identifier[e]
)
keyword[return] identifier[dict] ((
identifier[escape] ( identifier[key] ),{
literal[string] : identifier[self] . identifier[safe_better_repr] ( identifier[safe_getattr] ( identifier[key] )),
literal[string] : identifier[type] ( identifier[safe_getattr] ( identifier[key] )). identifier[__name__]
}
) keyword[for] identifier[key] keyword[in] identifier[dir] ( identifier[thing] )) | def dmp(self, thing):
"""Dump the content of an object in a dict for wdb.js"""
def safe_getattr(key):
"""Avoid crash on getattr"""
try:
return getattr(thing, key) # depends on [control=['try'], data=[]]
except Exception as e:
return 'Error getting attr "%s" from "%s" (%s: %s)' % (key, thing, type(e).__name__, e) # depends on [control=['except'], data=['e']]
return dict(((escape(key), {'val': self.safe_better_repr(safe_getattr(key)), 'type': type(safe_getattr(key)).__name__}) for key in dir(thing))) |
def _rotate_to_arbitrary_vector(v,a,inv=False,_dontcutsmall=False):
    """ Return a rotation matrix that rotates v to align with unit vector a
    i.e. R . v = |v|\hat{a}

    v is treated as an (n,3) array of row vectors (the axis=1 sums and
    (3,1)-tiling below fix that shape); a is a 3-vector, presumably a
    unit vector -- TODO confirm. Returns an (n,3,3) array of matrices,
    one per row of v. inv=True yields the inverse rotation instead.
    """
    # Normalize each row of v to unit length.
    normv= v/numpy.tile(numpy.sqrt(numpy.sum(v**2.,axis=1)),(3,1)).T
    # Rotation axis: unit vector along v x a, per row.
    rotaxis= numpy.cross(normv,a)
    rotaxis/= numpy.tile(numpy.sqrt(numpy.sum(rotaxis**2.,axis=1)),(3,1)).T
    # Per-row cross-product (skew-symmetric) matrix K of the axis:
    # row i of each 3x3 matrix is rotaxis x e_i.
    crossmatrix= numpy.empty((len(v),3,3))
    crossmatrix[:,0,:]= numpy.cross(rotaxis,[1,0,0])
    crossmatrix[:,1,:]= numpy.cross(rotaxis,[0,1,0])
    crossmatrix[:,2,:]= numpy.cross(rotaxis,[0,0,1])
    # Cosine/sine of the angle between each (normalized) v and a.
    costheta= numpy.dot(normv,a)
    sintheta= numpy.sqrt(1.-costheta**2.)
    # inv flips the sign of the sine (antisymmetric) term only; since the
    # remaining terms are symmetric, that transposes each matrix and so
    # gives the inverse rotation.
    if inv: sgn= 1.
    else: sgn= -1.
    # Rodrigues' rotation formula:
    #   R = cos(theta) I + sgn*sin(theta) K + (1 - cos(theta)) r r^T
    out= numpy.tile(costheta,(3,3,1)).T*numpy.tile(numpy.eye(3),(len(v),1,1))\
        +sgn*numpy.tile(sintheta,(3,3,1)).T*crossmatrix\
        +numpy.tile(1.-costheta,(3,3,1)).T\
        *(rotaxis[:,:,numpy.newaxis]*rotaxis[:,numpy.newaxis,:])
    if not _dontcutsmall:
        # Near-(anti)parallel rows: the axis is ill-defined (cross product
        # ~ 0), so snap to +/- identity. NOTE(review): -eye(3) has
        # det = -1 (a reflection, not a proper rotation), though it does
        # map v onto -|v| a.
        out[numpy.fabs(costheta-1.) < 10.**-10.]= numpy.eye(3)
        out[numpy.fabs(costheta+1.) < 10.**-10.]= -numpy.eye(3)
    return out
constant[ Return a rotation matrix that rotates v to align with unit vector a
i.e. R . v = |v|\hat{a} ]
variable[normv] assign[=] binary_operation[name[v] / call[name[numpy].tile, parameter[call[name[numpy].sqrt, parameter[call[name[numpy].sum, parameter[binary_operation[name[v] ** constant[2.0]]]]]], tuple[[<ast.Constant object at 0x7da1b0e9db70>, <ast.Constant object at 0x7da1b0e9dbd0>]]]].T]
variable[rotaxis] assign[=] call[name[numpy].cross, parameter[name[normv], name[a]]]
<ast.AugAssign object at 0x7da1b0e9da80>
variable[crossmatrix] assign[=] call[name[numpy].empty, parameter[tuple[[<ast.Call object at 0x7da1b0e9e0b0>, <ast.Constant object at 0x7da1b0e9e6b0>, <ast.Constant object at 0x7da1b0e9df90>]]]]
call[name[crossmatrix]][tuple[[<ast.Slice object at 0x7da1b0e9e2f0>, <ast.Constant object at 0x7da1b0e9e620>, <ast.Slice object at 0x7da1b0e9e290>]]] assign[=] call[name[numpy].cross, parameter[name[rotaxis], list[[<ast.Constant object at 0x7da1b0e8ef80>, <ast.Constant object at 0x7da1b0e8e7a0>, <ast.Constant object at 0x7da1b0e8caf0>]]]]
call[name[crossmatrix]][tuple[[<ast.Slice object at 0x7da1b0e8f1c0>, <ast.Constant object at 0x7da1b0e8d870>, <ast.Slice object at 0x7da1b0e8fac0>]]] assign[=] call[name[numpy].cross, parameter[name[rotaxis], list[[<ast.Constant object at 0x7da1b0e8f220>, <ast.Constant object at 0x7da1b0e8f070>, <ast.Constant object at 0x7da1b0e8ecb0>]]]]
call[name[crossmatrix]][tuple[[<ast.Slice object at 0x7da1b0e8e230>, <ast.Constant object at 0x7da1b0e8e050>, <ast.Slice object at 0x7da1b0e8ee30>]]] assign[=] call[name[numpy].cross, parameter[name[rotaxis], list[[<ast.Constant object at 0x7da1b0e8df60>, <ast.Constant object at 0x7da1b0e8eef0>, <ast.Constant object at 0x7da1b0e8f0d0>]]]]
variable[costheta] assign[=] call[name[numpy].dot, parameter[name[normv], name[a]]]
variable[sintheta] assign[=] call[name[numpy].sqrt, parameter[binary_operation[constant[1.0] - binary_operation[name[costheta] ** constant[2.0]]]]]
if name[inv] begin[:]
variable[sgn] assign[=] constant[1.0]
variable[out] assign[=] binary_operation[binary_operation[binary_operation[call[name[numpy].tile, parameter[name[costheta], tuple[[<ast.Constant object at 0x7da1b0e8c970>, <ast.Constant object at 0x7da1b0e8e680>, <ast.Constant object at 0x7da1b0e8d780>]]]].T * call[name[numpy].tile, parameter[call[name[numpy].eye, parameter[constant[3]]], tuple[[<ast.Call object at 0x7da1b0e8df00>, <ast.Constant object at 0x7da1b0e8d930>, <ast.Constant object at 0x7da1b0e8eb30>]]]]] + binary_operation[binary_operation[name[sgn] * call[name[numpy].tile, parameter[name[sintheta], tuple[[<ast.Constant object at 0x7da1b0e8f640>, <ast.Constant object at 0x7da1b0e8ea10>, <ast.Constant object at 0x7da1b0e8d510>]]]].T] * name[crossmatrix]]] + binary_operation[call[name[numpy].tile, parameter[binary_operation[constant[1.0] - name[costheta]], tuple[[<ast.Constant object at 0x7da1b0e8e200>, <ast.Constant object at 0x7da1b0e8fc10>, <ast.Constant object at 0x7da1b0e8c490>]]]].T * binary_operation[call[name[rotaxis]][tuple[[<ast.Slice object at 0x7da1b0e8c520>, <ast.Slice object at 0x7da1b0e8e6b0>, <ast.Attribute object at 0x7da1b0e8c5b0>]]] * call[name[rotaxis]][tuple[[<ast.Slice object at 0x7da1b0e8f3d0>, <ast.Attribute object at 0x7da1b0e8d960>, <ast.Slice object at 0x7da1b0e8e950>]]]]]]
if <ast.UnaryOp object at 0x7da1b0e8f010> begin[:]
call[name[out]][compare[call[name[numpy].fabs, parameter[binary_operation[name[costheta] - constant[1.0]]]] less[<] binary_operation[constant[10.0] ** <ast.UnaryOp object at 0x7da1b0ea4fa0>]]] assign[=] call[name[numpy].eye, parameter[constant[3]]]
call[name[out]][compare[call[name[numpy].fabs, parameter[binary_operation[name[costheta] + constant[1.0]]]] less[<] binary_operation[constant[10.0] ** <ast.UnaryOp object at 0x7da1b0e8ef50>]]] assign[=] <ast.UnaryOp object at 0x7da1b0e8ec50>
return[name[out]] | keyword[def] identifier[_rotate_to_arbitrary_vector] ( identifier[v] , identifier[a] , identifier[inv] = keyword[False] , identifier[_dontcutsmall] = keyword[False] ):
literal[string]
identifier[normv] = identifier[v] / identifier[numpy] . identifier[tile] ( identifier[numpy] . identifier[sqrt] ( identifier[numpy] . identifier[sum] ( identifier[v] ** literal[int] , identifier[axis] = literal[int] )),( literal[int] , literal[int] )). identifier[T]
identifier[rotaxis] = identifier[numpy] . identifier[cross] ( identifier[normv] , identifier[a] )
identifier[rotaxis] /= identifier[numpy] . identifier[tile] ( identifier[numpy] . identifier[sqrt] ( identifier[numpy] . identifier[sum] ( identifier[rotaxis] ** literal[int] , identifier[axis] = literal[int] )),( literal[int] , literal[int] )). identifier[T]
identifier[crossmatrix] = identifier[numpy] . identifier[empty] (( identifier[len] ( identifier[v] ), literal[int] , literal[int] ))
identifier[crossmatrix] [:, literal[int] ,:]= identifier[numpy] . identifier[cross] ( identifier[rotaxis] ,[ literal[int] , literal[int] , literal[int] ])
identifier[crossmatrix] [:, literal[int] ,:]= identifier[numpy] . identifier[cross] ( identifier[rotaxis] ,[ literal[int] , literal[int] , literal[int] ])
identifier[crossmatrix] [:, literal[int] ,:]= identifier[numpy] . identifier[cross] ( identifier[rotaxis] ,[ literal[int] , literal[int] , literal[int] ])
identifier[costheta] = identifier[numpy] . identifier[dot] ( identifier[normv] , identifier[a] )
identifier[sintheta] = identifier[numpy] . identifier[sqrt] ( literal[int] - identifier[costheta] ** literal[int] )
keyword[if] identifier[inv] : identifier[sgn] = literal[int]
keyword[else] : identifier[sgn] =- literal[int]
identifier[out] = identifier[numpy] . identifier[tile] ( identifier[costheta] ,( literal[int] , literal[int] , literal[int] )). identifier[T] * identifier[numpy] . identifier[tile] ( identifier[numpy] . identifier[eye] ( literal[int] ),( identifier[len] ( identifier[v] ), literal[int] , literal[int] ))+ identifier[sgn] * identifier[numpy] . identifier[tile] ( identifier[sintheta] ,( literal[int] , literal[int] , literal[int] )). identifier[T] * identifier[crossmatrix] + identifier[numpy] . identifier[tile] ( literal[int] - identifier[costheta] ,( literal[int] , literal[int] , literal[int] )). identifier[T] *( identifier[rotaxis] [:,:, identifier[numpy] . identifier[newaxis] ]* identifier[rotaxis] [:, identifier[numpy] . identifier[newaxis] ,:])
keyword[if] keyword[not] identifier[_dontcutsmall] :
identifier[out] [ identifier[numpy] . identifier[fabs] ( identifier[costheta] - literal[int] )< literal[int] **- literal[int] ]= identifier[numpy] . identifier[eye] ( literal[int] )
identifier[out] [ identifier[numpy] . identifier[fabs] ( identifier[costheta] + literal[int] )< literal[int] **- literal[int] ]=- identifier[numpy] . identifier[eye] ( literal[int] )
keyword[return] identifier[out] | def _rotate_to_arbitrary_vector(v, a, inv=False, _dontcutsmall=False):
""" Return a rotation matrix that rotates v to align with unit vector a
i.e. R . v = |v|\\hat{a} """
normv = v / numpy.tile(numpy.sqrt(numpy.sum(v ** 2.0, axis=1)), (3, 1)).T
rotaxis = numpy.cross(normv, a)
rotaxis /= numpy.tile(numpy.sqrt(numpy.sum(rotaxis ** 2.0, axis=1)), (3, 1)).T
crossmatrix = numpy.empty((len(v), 3, 3))
crossmatrix[:, 0, :] = numpy.cross(rotaxis, [1, 0, 0])
crossmatrix[:, 1, :] = numpy.cross(rotaxis, [0, 1, 0])
crossmatrix[:, 2, :] = numpy.cross(rotaxis, [0, 0, 1])
costheta = numpy.dot(normv, a)
sintheta = numpy.sqrt(1.0 - costheta ** 2.0)
if inv:
sgn = 1.0 # depends on [control=['if'], data=[]]
else:
sgn = -1.0
out = numpy.tile(costheta, (3, 3, 1)).T * numpy.tile(numpy.eye(3), (len(v), 1, 1)) + sgn * numpy.tile(sintheta, (3, 3, 1)).T * crossmatrix + numpy.tile(1.0 - costheta, (3, 3, 1)).T * (rotaxis[:, :, numpy.newaxis] * rotaxis[:, numpy.newaxis, :])
if not _dontcutsmall:
out[numpy.fabs(costheta - 1.0) < 10.0 ** (-10.0)] = numpy.eye(3)
out[numpy.fabs(costheta + 1.0) < 10.0 ** (-10.0)] = -numpy.eye(3) # depends on [control=['if'], data=[]]
return out |
def inject_basic_program(self, ascii_listing):
    """
    save the given ASCII BASIC program listing into the emulator RAM.
    """
    api = self.machine_api
    memory = self.cpu.memory

    start_addr = memory.read_word(api.PROGRAM_START_ADDR)
    tokens = api.ascii_listing2program_dump(ascii_listing)
    memory.load(start_addr, tokens)
    log.critical("BASIC program injected into Memory.")

    # Point the interpreter's bookkeeping addresses just past the program.
    end_addr = start_addr + len(tokens)
    memory.write_word(api.VARIABLES_START_ADDR, end_addr)
    memory.write_word(api.ARRAY_START_ADDR, end_addr)
    memory.write_word(api.FREE_SPACE_START_ADDR, end_addr)
    log.critical("BASIC addresses updated.")
constant[
save the given ASCII BASIC program listing into the emulator RAM.
]
variable[program_start] assign[=] call[name[self].cpu.memory.read_word, parameter[name[self].machine_api.PROGRAM_START_ADDR]]
variable[tokens] assign[=] call[name[self].machine_api.ascii_listing2program_dump, parameter[name[ascii_listing]]]
call[name[self].cpu.memory.load, parameter[name[program_start], name[tokens]]]
call[name[log].critical, parameter[constant[BASIC program injected into Memory.]]]
variable[program_end] assign[=] binary_operation[name[program_start] + call[name[len], parameter[name[tokens]]]]
call[name[self].cpu.memory.write_word, parameter[name[self].machine_api.VARIABLES_START_ADDR, name[program_end]]]
call[name[self].cpu.memory.write_word, parameter[name[self].machine_api.ARRAY_START_ADDR, name[program_end]]]
call[name[self].cpu.memory.write_word, parameter[name[self].machine_api.FREE_SPACE_START_ADDR, name[program_end]]]
call[name[log].critical, parameter[constant[BASIC addresses updated.]]] | keyword[def] identifier[inject_basic_program] ( identifier[self] , identifier[ascii_listing] ):
literal[string]
identifier[program_start] = identifier[self] . identifier[cpu] . identifier[memory] . identifier[read_word] (
identifier[self] . identifier[machine_api] . identifier[PROGRAM_START_ADDR]
)
identifier[tokens] = identifier[self] . identifier[machine_api] . identifier[ascii_listing2program_dump] ( identifier[ascii_listing] )
identifier[self] . identifier[cpu] . identifier[memory] . identifier[load] ( identifier[program_start] , identifier[tokens] )
identifier[log] . identifier[critical] ( literal[string] )
identifier[program_end] = identifier[program_start] + identifier[len] ( identifier[tokens] )
identifier[self] . identifier[cpu] . identifier[memory] . identifier[write_word] ( identifier[self] . identifier[machine_api] . identifier[VARIABLES_START_ADDR] , identifier[program_end] )
identifier[self] . identifier[cpu] . identifier[memory] . identifier[write_word] ( identifier[self] . identifier[machine_api] . identifier[ARRAY_START_ADDR] , identifier[program_end] )
identifier[self] . identifier[cpu] . identifier[memory] . identifier[write_word] ( identifier[self] . identifier[machine_api] . identifier[FREE_SPACE_START_ADDR] , identifier[program_end] )
identifier[log] . identifier[critical] ( literal[string] ) | def inject_basic_program(self, ascii_listing):
"""
save the given ASCII BASIC program listing into the emulator RAM.
"""
program_start = self.cpu.memory.read_word(self.machine_api.PROGRAM_START_ADDR)
tokens = self.machine_api.ascii_listing2program_dump(ascii_listing)
self.cpu.memory.load(program_start, tokens)
log.critical('BASIC program injected into Memory.')
# Update the BASIC addresses:
program_end = program_start + len(tokens)
self.cpu.memory.write_word(self.machine_api.VARIABLES_START_ADDR, program_end)
self.cpu.memory.write_word(self.machine_api.ARRAY_START_ADDR, program_end)
self.cpu.memory.write_word(self.machine_api.FREE_SPACE_START_ADDR, program_end)
log.critical('BASIC addresses updated.') |
def __get_ssh_keys_id_or_fingerprint(ssh_keys, token, name):
    """
    Check and return a list of SSH key IDs or fingerprints according
    to DigitalOcean's API. This method is used to check and create a
    droplet with the correct SSH keys.

    Each entry of ssh_keys may be:
      * an int (or Python 2 long): used directly as a key ID,
      * an SSHKey instance: its id is used,
      * a string: either an MD5 colon fingerprint (passed through), or
        a public key (looked up via the API and registered if unknown).
    Anything else raises BadSSHKeyFormat.
    """
    ssh_keys_id = list()
    for ssh_key in ssh_keys:
        if type(ssh_key) in [int, type(2 ** 64)]:
            ssh_keys_id.append(int(ssh_key))
        elif type(ssh_key) == SSHKey:
            ssh_keys_id.append(ssh_key.id)
        elif type(ssh_key) in [type(u''), type('')]:
            # ssh_key could either be a fingerprint or a public key
            #
            # type(u'') and type('') is the same in python 3 but
            # different in 2. See:
            # https://github.com/koalalorenzo/python-digitalocean/issues/80
            #
            # A full MD5 fingerprint is 16 colon-separated hex byte
            # pairs. The previous pattern ended in a single hex digit
            # and compared match.end() to len(ssh_key) - 1, which left
            # the final character completely unvalidated; anchoring a
            # two-digit final group with \Z validates the whole string.
            regexp_of_fingerprint = r'([0-9a-fA-F]{2}:){15}[0-9a-fA-F]{2}\Z'
            if re.match(regexp_of_fingerprint, ssh_key) is not None:
                ssh_keys_id.append(ssh_key)
            else:
                # Treat the string as a public key: reuse the already
                # registered key if it exists, otherwise create it.
                key = SSHKey()
                key.token = token
                results = key.load_by_pub_key(ssh_key)
                if results is None:
                    key.public_key = ssh_key
                    key.name = "SSH Key %s" % name
                    key.create()
                else:
                    key = results
                ssh_keys_id.append(key.id)
        else:
            raise BadSSHKeyFormat(
                "Droplet.ssh_keys should be a list of IDs, public keys"
                + " or fingerprints."
            )
    return ssh_keys_id
constant[
Check and return a list of SSH key IDs or fingerprints according
to DigitalOcean's API. This method is used to check and create a
droplet with the correct SSH keys.
]
variable[ssh_keys_id] assign[=] call[name[list], parameter[]]
for taget[name[ssh_key]] in starred[name[ssh_keys]] begin[:]
if compare[call[name[type], parameter[name[ssh_key]]] in list[[<ast.Name object at 0x7da20e956200>, <ast.Call object at 0x7da20e954520>]]] begin[:]
call[name[ssh_keys_id].append, parameter[call[name[int], parameter[name[ssh_key]]]]]
return[name[ssh_keys_id]] | keyword[def] identifier[__get_ssh_keys_id_or_fingerprint] ( identifier[ssh_keys] , identifier[token] , identifier[name] ):
literal[string]
identifier[ssh_keys_id] = identifier[list] ()
keyword[for] identifier[ssh_key] keyword[in] identifier[ssh_keys] :
keyword[if] identifier[type] ( identifier[ssh_key] ) keyword[in] [ identifier[int] , identifier[type] ( literal[int] ** literal[int] )]:
identifier[ssh_keys_id] . identifier[append] ( identifier[int] ( identifier[ssh_key] ))
keyword[elif] identifier[type] ( identifier[ssh_key] )== identifier[SSHKey] :
identifier[ssh_keys_id] . identifier[append] ( identifier[ssh_key] . identifier[id] )
keyword[elif] identifier[type] ( identifier[ssh_key] ) keyword[in] [ identifier[type] ( literal[string] ), identifier[type] ( literal[string] )]:
identifier[regexp_of_fingerprint] = literal[string]
identifier[match] = identifier[re] . identifier[match] ( identifier[regexp_of_fingerprint] , identifier[ssh_key] )
keyword[if] identifier[match] keyword[is] keyword[not] keyword[None] keyword[and] identifier[match] . identifier[end] ()== identifier[len] ( identifier[ssh_key] )- literal[int] :
identifier[ssh_keys_id] . identifier[append] ( identifier[ssh_key] )
keyword[else] :
identifier[key] = identifier[SSHKey] ()
identifier[key] . identifier[token] = identifier[token]
identifier[results] = identifier[key] . identifier[load_by_pub_key] ( identifier[ssh_key] )
keyword[if] identifier[results] keyword[is] keyword[None] :
identifier[key] . identifier[public_key] = identifier[ssh_key]
identifier[key] . identifier[name] = literal[string] % identifier[name]
identifier[key] . identifier[create] ()
keyword[else] :
identifier[key] = identifier[results]
identifier[ssh_keys_id] . identifier[append] ( identifier[key] . identifier[id] )
keyword[else] :
keyword[raise] identifier[BadSSHKeyFormat] (
literal[string]
+ literal[string]
)
keyword[return] identifier[ssh_keys_id] | def __get_ssh_keys_id_or_fingerprint(ssh_keys, token, name):
"""
Check and return a list of SSH key IDs or fingerprints according
to DigitalOcean's API. This method is used to check and create a
droplet with the correct SSH keys.
"""
ssh_keys_id = list()
for ssh_key in ssh_keys:
if type(ssh_key) in [int, type(2 ** 64)]:
ssh_keys_id.append(int(ssh_key)) # depends on [control=['if'], data=[]]
elif type(ssh_key) == SSHKey:
ssh_keys_id.append(ssh_key.id) # depends on [control=['if'], data=[]]
elif type(ssh_key) in [type(u''), type('')]:
# ssh_key could either be a fingerprint or a public key
#
# type(u'') and type('') is the same in python 3 but
# different in 2. See:
# https://github.com/koalalorenzo/python-digitalocean/issues/80
regexp_of_fingerprint = '([0-9a-fA-F]{2}:){15}[0-9a-fA-F]'
match = re.match(regexp_of_fingerprint, ssh_key)
if match is not None and match.end() == len(ssh_key) - 1:
ssh_keys_id.append(ssh_key) # depends on [control=['if'], data=[]]
else:
key = SSHKey()
key.token = token
results = key.load_by_pub_key(ssh_key)
if results is None:
key.public_key = ssh_key
key.name = 'SSH Key %s' % name
key.create() # depends on [control=['if'], data=[]]
else:
key = results
ssh_keys_id.append(key.id) # depends on [control=['if'], data=[]]
else:
raise BadSSHKeyFormat('Droplet.ssh_keys should be a list of IDs, public keys' + ' or fingerprints.') # depends on [control=['for'], data=['ssh_key']]
return ssh_keys_id |
def _exists(fs, path):
"""
Check that the given path exists on the filesystem.
Note that unlike `os.path.exists`, we *do* propagate file system errors
other than a non-existent path or non-existent directory component.
E.g., should EPERM or ELOOP be raised, an exception will bubble up.
"""
try:
fs.stat(path)
except (exceptions.FileNotFound, exceptions.NotADirectory):
return False
return True | def function[_exists, parameter[fs, path]]:
constant[
Check that the given path exists on the filesystem.
Note that unlike `os.path.exists`, we *do* propagate file system errors
other than a non-existent path or non-existent directory component.
E.g., should EPERM or ELOOP be raised, an exception will bubble up.
]
<ast.Try object at 0x7da20c795510>
return[constant[True]] | keyword[def] identifier[_exists] ( identifier[fs] , identifier[path] ):
literal[string]
keyword[try] :
identifier[fs] . identifier[stat] ( identifier[path] )
keyword[except] ( identifier[exceptions] . identifier[FileNotFound] , identifier[exceptions] . identifier[NotADirectory] ):
keyword[return] keyword[False]
keyword[return] keyword[True] | def _exists(fs, path):
"""
Check that the given path exists on the filesystem.
Note that unlike `os.path.exists`, we *do* propagate file system errors
other than a non-existent path or non-existent directory component.
E.g., should EPERM or ELOOP be raised, an exception will bubble up.
"""
try:
fs.stat(path) # depends on [control=['try'], data=[]]
except (exceptions.FileNotFound, exceptions.NotADirectory):
return False # depends on [control=['except'], data=[]]
return True |
def register_entity(self, entity_value, entity_type, alias_of=None, domain=0):
"""
Register an entity to be tagged in potential parse results.
Args:
entity_value(str): the value/proper name of an entity instance
(Ex: "The Big Bang Theory")
entity_type(str): the type/tag of an entity instance (Ex: "Television Show")
domain(str): a string representing the domain you wish to add the entity to
"""
if domain not in self.domains:
self.register_domain(domain=domain)
self.domains[domain].register_entity(entity_value=entity_value,
entity_type=entity_type,
alias_of=alias_of) | def function[register_entity, parameter[self, entity_value, entity_type, alias_of, domain]]:
constant[
Register an entity to be tagged in potential parse results.
Args:
entity_value(str): the value/proper name of an entity instance
(Ex: "The Big Bang Theory")
entity_type(str): the type/tag of an entity instance (Ex: "Television Show")
domain(str): a string representing the domain you wish to add the entity to
]
if compare[name[domain] <ast.NotIn object at 0x7da2590d7190> name[self].domains] begin[:]
call[name[self].register_domain, parameter[]]
call[call[name[self].domains][name[domain]].register_entity, parameter[]] | keyword[def] identifier[register_entity] ( identifier[self] , identifier[entity_value] , identifier[entity_type] , identifier[alias_of] = keyword[None] , identifier[domain] = literal[int] ):
literal[string]
keyword[if] identifier[domain] keyword[not] keyword[in] identifier[self] . identifier[domains] :
identifier[self] . identifier[register_domain] ( identifier[domain] = identifier[domain] )
identifier[self] . identifier[domains] [ identifier[domain] ]. identifier[register_entity] ( identifier[entity_value] = identifier[entity_value] ,
identifier[entity_type] = identifier[entity_type] ,
identifier[alias_of] = identifier[alias_of] ) | def register_entity(self, entity_value, entity_type, alias_of=None, domain=0):
"""
Register an entity to be tagged in potential parse results.
Args:
entity_value(str): the value/proper name of an entity instance
(Ex: "The Big Bang Theory")
entity_type(str): the type/tag of an entity instance (Ex: "Television Show")
domain(str): a string representing the domain you wish to add the entity to
"""
if domain not in self.domains:
self.register_domain(domain=domain) # depends on [control=['if'], data=['domain']]
self.domains[domain].register_entity(entity_value=entity_value, entity_type=entity_type, alias_of=alias_of) |
def direct_deployment_mode(command, parser, cluster, cl_args):
'''
check the direct deployment mode for the given cluster
if it is valid return the valid set of args
:param command:
:param parser:
:param cluster:
:param cl_args:
:return:
'''
cluster = cl_args['cluster']
try:
config_path = cl_args['config_path']
override_config_file = config.parse_override_config_and_write_file(cl_args['config_property'])
except KeyError:
# if some of the arguments are not found, print error and exit
subparser = config.get_subparser(parser, command)
print(subparser.format_help())
return dict()
# check if the cluster config directory exists
if not cdefs.check_direct_mode_cluster_definition(cluster, config_path):
Log.error("Cluster config directory \'%s\' does not exist", config_path)
return dict()
config_path = config.get_heron_cluster_conf_dir(cluster, config_path)
if not os.path.isdir(config_path):
Log.error("Cluster config directory \'%s\' does not exist", config_path)
return dict()
Log.info("Using cluster definition in %s" % config_path)
try:
cluster_role_env = (cl_args['cluster'], cl_args['role'], cl_args['environ'])
config.direct_mode_cluster_role_env(cluster_role_env, config_path)
cluster_tuple = config.defaults_cluster_role_env(cluster_role_env)
except Exception as ex:
Log.error("Argument cluster/[role]/[env] is not correct: %s", str(ex))
return dict()
new_cl_args = dict()
new_cl_args['cluster'] = cluster_tuple[0]
new_cl_args['role'] = cluster_tuple[1]
new_cl_args['environ'] = cluster_tuple[2]
new_cl_args['config_path'] = config_path
new_cl_args['override_config_file'] = override_config_file
new_cl_args['deploy_mode'] = config.DIRECT_MODE
cl_args.update(new_cl_args)
return cl_args | def function[direct_deployment_mode, parameter[command, parser, cluster, cl_args]]:
constant[
check the direct deployment mode for the given cluster
if it is valid return the valid set of args
:param command:
:param parser:
:param cluster:
:param cl_args:
:return:
]
variable[cluster] assign[=] call[name[cl_args]][constant[cluster]]
<ast.Try object at 0x7da20c6c7d30>
if <ast.UnaryOp object at 0x7da18ede4880> begin[:]
call[name[Log].error, parameter[constant[Cluster config directory '%s' does not exist], name[config_path]]]
return[call[name[dict], parameter[]]]
variable[config_path] assign[=] call[name[config].get_heron_cluster_conf_dir, parameter[name[cluster], name[config_path]]]
if <ast.UnaryOp object at 0x7da18ede42b0> begin[:]
call[name[Log].error, parameter[constant[Cluster config directory '%s' does not exist], name[config_path]]]
return[call[name[dict], parameter[]]]
call[name[Log].info, parameter[binary_operation[constant[Using cluster definition in %s] <ast.Mod object at 0x7da2590d6920> name[config_path]]]]
<ast.Try object at 0x7da18ede4460>
variable[new_cl_args] assign[=] call[name[dict], parameter[]]
call[name[new_cl_args]][constant[cluster]] assign[=] call[name[cluster_tuple]][constant[0]]
call[name[new_cl_args]][constant[role]] assign[=] call[name[cluster_tuple]][constant[1]]
call[name[new_cl_args]][constant[environ]] assign[=] call[name[cluster_tuple]][constant[2]]
call[name[new_cl_args]][constant[config_path]] assign[=] name[config_path]
call[name[new_cl_args]][constant[override_config_file]] assign[=] name[override_config_file]
call[name[new_cl_args]][constant[deploy_mode]] assign[=] name[config].DIRECT_MODE
call[name[cl_args].update, parameter[name[new_cl_args]]]
return[name[cl_args]] | keyword[def] identifier[direct_deployment_mode] ( identifier[command] , identifier[parser] , identifier[cluster] , identifier[cl_args] ):
literal[string]
identifier[cluster] = identifier[cl_args] [ literal[string] ]
keyword[try] :
identifier[config_path] = identifier[cl_args] [ literal[string] ]
identifier[override_config_file] = identifier[config] . identifier[parse_override_config_and_write_file] ( identifier[cl_args] [ literal[string] ])
keyword[except] identifier[KeyError] :
identifier[subparser] = identifier[config] . identifier[get_subparser] ( identifier[parser] , identifier[command] )
identifier[print] ( identifier[subparser] . identifier[format_help] ())
keyword[return] identifier[dict] ()
keyword[if] keyword[not] identifier[cdefs] . identifier[check_direct_mode_cluster_definition] ( identifier[cluster] , identifier[config_path] ):
identifier[Log] . identifier[error] ( literal[string] , identifier[config_path] )
keyword[return] identifier[dict] ()
identifier[config_path] = identifier[config] . identifier[get_heron_cluster_conf_dir] ( identifier[cluster] , identifier[config_path] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[config_path] ):
identifier[Log] . identifier[error] ( literal[string] , identifier[config_path] )
keyword[return] identifier[dict] ()
identifier[Log] . identifier[info] ( literal[string] % identifier[config_path] )
keyword[try] :
identifier[cluster_role_env] =( identifier[cl_args] [ literal[string] ], identifier[cl_args] [ literal[string] ], identifier[cl_args] [ literal[string] ])
identifier[config] . identifier[direct_mode_cluster_role_env] ( identifier[cluster_role_env] , identifier[config_path] )
identifier[cluster_tuple] = identifier[config] . identifier[defaults_cluster_role_env] ( identifier[cluster_role_env] )
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[Log] . identifier[error] ( literal[string] , identifier[str] ( identifier[ex] ))
keyword[return] identifier[dict] ()
identifier[new_cl_args] = identifier[dict] ()
identifier[new_cl_args] [ literal[string] ]= identifier[cluster_tuple] [ literal[int] ]
identifier[new_cl_args] [ literal[string] ]= identifier[cluster_tuple] [ literal[int] ]
identifier[new_cl_args] [ literal[string] ]= identifier[cluster_tuple] [ literal[int] ]
identifier[new_cl_args] [ literal[string] ]= identifier[config_path]
identifier[new_cl_args] [ literal[string] ]= identifier[override_config_file]
identifier[new_cl_args] [ literal[string] ]= identifier[config] . identifier[DIRECT_MODE]
identifier[cl_args] . identifier[update] ( identifier[new_cl_args] )
keyword[return] identifier[cl_args] | def direct_deployment_mode(command, parser, cluster, cl_args):
"""
check the direct deployment mode for the given cluster
if it is valid return the valid set of args
:param command:
:param parser:
:param cluster:
:param cl_args:
:return:
"""
cluster = cl_args['cluster']
try:
config_path = cl_args['config_path']
override_config_file = config.parse_override_config_and_write_file(cl_args['config_property']) # depends on [control=['try'], data=[]]
except KeyError:
# if some of the arguments are not found, print error and exit
subparser = config.get_subparser(parser, command)
print(subparser.format_help())
return dict() # depends on [control=['except'], data=[]]
# check if the cluster config directory exists
if not cdefs.check_direct_mode_cluster_definition(cluster, config_path):
Log.error("Cluster config directory '%s' does not exist", config_path)
return dict() # depends on [control=['if'], data=[]]
config_path = config.get_heron_cluster_conf_dir(cluster, config_path)
if not os.path.isdir(config_path):
Log.error("Cluster config directory '%s' does not exist", config_path)
return dict() # depends on [control=['if'], data=[]]
Log.info('Using cluster definition in %s' % config_path)
try:
cluster_role_env = (cl_args['cluster'], cl_args['role'], cl_args['environ'])
config.direct_mode_cluster_role_env(cluster_role_env, config_path)
cluster_tuple = config.defaults_cluster_role_env(cluster_role_env) # depends on [control=['try'], data=[]]
except Exception as ex:
Log.error('Argument cluster/[role]/[env] is not correct: %s', str(ex))
return dict() # depends on [control=['except'], data=['ex']]
new_cl_args = dict()
new_cl_args['cluster'] = cluster_tuple[0]
new_cl_args['role'] = cluster_tuple[1]
new_cl_args['environ'] = cluster_tuple[2]
new_cl_args['config_path'] = config_path
new_cl_args['override_config_file'] = override_config_file
new_cl_args['deploy_mode'] = config.DIRECT_MODE
cl_args.update(new_cl_args)
return cl_args |
def color(self, values, ids=(), key_on='feature.id', palette='YlOrBr', **kwargs):
"""Color map features by binning values.
values -- a sequence of values or a table of keys and values
ids -- an ID for each value; if none are provided, indices are used
key_on -- attribute of each feature to match to ids
palette -- one of the following color brewer palettes:
'BuGn', 'BuPu', 'GnBu', 'OrRd', 'PuBu', 'PuBuGn', 'PuRd', 'RdPu',
'YlGn', 'YlGnBu', 'YlOrBr', and 'YlOrRd'.
Defaults from Folium:
threshold_scale: list, default None
Data range for D3 threshold scale. Defaults to the following range
of quantiles: [0, 0.5, 0.75, 0.85, 0.9], rounded to the nearest
order-of-magnitude integer. Ex: 270 rounds to 200, 5600 to 6000.
fill_opacity: float, default 0.6
Area fill opacity, range 0-1.
line_color: string, default 'black'
GeoJSON geopath line color.
line_weight: int, default 1
GeoJSON geopath line weight.
line_opacity: float, default 1
GeoJSON geopath line opacity, range 0-1.
legend_name: string, default None
Title for data legend. If not passed, defaults to columns[1].
"""
# Set values and ids to both be simple sequences by inspecting values
id_name, value_name = 'IDs', 'values'
if isinstance(values, collections.abc.Mapping):
assert not ids, 'IDs and a map cannot both be used together'
if hasattr(values, 'columns') and len(values.columns) == 2:
table = values
ids, values = table.columns
id_name, value_name = table.labels
else:
dictionary = values
ids, values = list(dictionary.keys()), list(dictionary.values())
if len(ids) != len(values):
assert len(ids) == 0
# Use indices as IDs
ids = list(range(len(values)))
m = self._create_map()
data = pandas.DataFrame({id_name: ids, value_name: values})
attrs = {
'geo_str': json.dumps(self.geojson()),
'data': data,
'columns': [id_name, value_name],
'key_on': key_on,
'fill_color': palette,
}
kwargs.update(attrs)
m.geo_json(**kwargs)
colored = self.format()
colored._folium_map = m
return colored | def function[color, parameter[self, values, ids, key_on, palette]]:
constant[Color map features by binning values.
values -- a sequence of values or a table of keys and values
ids -- an ID for each value; if none are provided, indices are used
key_on -- attribute of each feature to match to ids
palette -- one of the following color brewer palettes:
'BuGn', 'BuPu', 'GnBu', 'OrRd', 'PuBu', 'PuBuGn', 'PuRd', 'RdPu',
'YlGn', 'YlGnBu', 'YlOrBr', and 'YlOrRd'.
Defaults from Folium:
threshold_scale: list, default None
Data range for D3 threshold scale. Defaults to the following range
of quantiles: [0, 0.5, 0.75, 0.85, 0.9], rounded to the nearest
order-of-magnitude integer. Ex: 270 rounds to 200, 5600 to 6000.
fill_opacity: float, default 0.6
Area fill opacity, range 0-1.
line_color: string, default 'black'
GeoJSON geopath line color.
line_weight: int, default 1
GeoJSON geopath line weight.
line_opacity: float, default 1
GeoJSON geopath line opacity, range 0-1.
legend_name: string, default None
Title for data legend. If not passed, defaults to columns[1].
]
<ast.Tuple object at 0x7da1b07fff10> assign[=] tuple[[<ast.Constant object at 0x7da1b07fe9e0>, <ast.Constant object at 0x7da1b07fe1d0>]]
if call[name[isinstance], parameter[name[values], name[collections].abc.Mapping]] begin[:]
assert[<ast.UnaryOp object at 0x7da1b07fe770>]
if <ast.BoolOp object at 0x7da1b07ffca0> begin[:]
variable[table] assign[=] name[values]
<ast.Tuple object at 0x7da1b07e0820> assign[=] name[table].columns
<ast.Tuple object at 0x7da1b07e2f50> assign[=] name[table].labels
if compare[call[name[len], parameter[name[ids]]] not_equal[!=] call[name[len], parameter[name[values]]]] begin[:]
assert[compare[call[name[len], parameter[name[ids]]] equal[==] constant[0]]]
variable[ids] assign[=] call[name[list], parameter[call[name[range], parameter[call[name[len], parameter[name[values]]]]]]]
variable[m] assign[=] call[name[self]._create_map, parameter[]]
variable[data] assign[=] call[name[pandas].DataFrame, parameter[dictionary[[<ast.Name object at 0x7da1b07e1900>, <ast.Name object at 0x7da1b07e39a0>], [<ast.Name object at 0x7da1b07e1f90>, <ast.Name object at 0x7da1b07e23b0>]]]]
variable[attrs] assign[=] dictionary[[<ast.Constant object at 0x7da1b07e0b20>, <ast.Constant object at 0x7da1b07e1fc0>, <ast.Constant object at 0x7da1b07e33a0>, <ast.Constant object at 0x7da1b07e30a0>, <ast.Constant object at 0x7da1b0731540>], [<ast.Call object at 0x7da1b0733430>, <ast.Name object at 0x7da1b07335e0>, <ast.List object at 0x7da1b0733a90>, <ast.Name object at 0x7da1b0733520>, <ast.Name object at 0x7da1b0733880>]]
call[name[kwargs].update, parameter[name[attrs]]]
call[name[m].geo_json, parameter[]]
variable[colored] assign[=] call[name[self].format, parameter[]]
name[colored]._folium_map assign[=] name[m]
return[name[colored]] | keyword[def] identifier[color] ( identifier[self] , identifier[values] , identifier[ids] =(), identifier[key_on] = literal[string] , identifier[palette] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[id_name] , identifier[value_name] = literal[string] , literal[string]
keyword[if] identifier[isinstance] ( identifier[values] , identifier[collections] . identifier[abc] . identifier[Mapping] ):
keyword[assert] keyword[not] identifier[ids] , literal[string]
keyword[if] identifier[hasattr] ( identifier[values] , literal[string] ) keyword[and] identifier[len] ( identifier[values] . identifier[columns] )== literal[int] :
identifier[table] = identifier[values]
identifier[ids] , identifier[values] = identifier[table] . identifier[columns]
identifier[id_name] , identifier[value_name] = identifier[table] . identifier[labels]
keyword[else] :
identifier[dictionary] = identifier[values]
identifier[ids] , identifier[values] = identifier[list] ( identifier[dictionary] . identifier[keys] ()), identifier[list] ( identifier[dictionary] . identifier[values] ())
keyword[if] identifier[len] ( identifier[ids] )!= identifier[len] ( identifier[values] ):
keyword[assert] identifier[len] ( identifier[ids] )== literal[int]
identifier[ids] = identifier[list] ( identifier[range] ( identifier[len] ( identifier[values] )))
identifier[m] = identifier[self] . identifier[_create_map] ()
identifier[data] = identifier[pandas] . identifier[DataFrame] ({ identifier[id_name] : identifier[ids] , identifier[value_name] : identifier[values] })
identifier[attrs] ={
literal[string] : identifier[json] . identifier[dumps] ( identifier[self] . identifier[geojson] ()),
literal[string] : identifier[data] ,
literal[string] :[ identifier[id_name] , identifier[value_name] ],
literal[string] : identifier[key_on] ,
literal[string] : identifier[palette] ,
}
identifier[kwargs] . identifier[update] ( identifier[attrs] )
identifier[m] . identifier[geo_json] (** identifier[kwargs] )
identifier[colored] = identifier[self] . identifier[format] ()
identifier[colored] . identifier[_folium_map] = identifier[m]
keyword[return] identifier[colored] | def color(self, values, ids=(), key_on='feature.id', palette='YlOrBr', **kwargs):
"""Color map features by binning values.
values -- a sequence of values or a table of keys and values
ids -- an ID for each value; if none are provided, indices are used
key_on -- attribute of each feature to match to ids
palette -- one of the following color brewer palettes:
'BuGn', 'BuPu', 'GnBu', 'OrRd', 'PuBu', 'PuBuGn', 'PuRd', 'RdPu',
'YlGn', 'YlGnBu', 'YlOrBr', and 'YlOrRd'.
Defaults from Folium:
threshold_scale: list, default None
Data range for D3 threshold scale. Defaults to the following range
of quantiles: [0, 0.5, 0.75, 0.85, 0.9], rounded to the nearest
order-of-magnitude integer. Ex: 270 rounds to 200, 5600 to 6000.
fill_opacity: float, default 0.6
Area fill opacity, range 0-1.
line_color: string, default 'black'
GeoJSON geopath line color.
line_weight: int, default 1
GeoJSON geopath line weight.
line_opacity: float, default 1
GeoJSON geopath line opacity, range 0-1.
legend_name: string, default None
Title for data legend. If not passed, defaults to columns[1].
"""
# Set values and ids to both be simple sequences by inspecting values
(id_name, value_name) = ('IDs', 'values')
if isinstance(values, collections.abc.Mapping):
assert not ids, 'IDs and a map cannot both be used together'
if hasattr(values, 'columns') and len(values.columns) == 2:
table = values
(ids, values) = table.columns
(id_name, value_name) = table.labels # depends on [control=['if'], data=[]]
else:
dictionary = values
(ids, values) = (list(dictionary.keys()), list(dictionary.values())) # depends on [control=['if'], data=[]]
if len(ids) != len(values):
assert len(ids) == 0
# Use indices as IDs
ids = list(range(len(values))) # depends on [control=['if'], data=[]]
m = self._create_map()
data = pandas.DataFrame({id_name: ids, value_name: values})
attrs = {'geo_str': json.dumps(self.geojson()), 'data': data, 'columns': [id_name, value_name], 'key_on': key_on, 'fill_color': palette}
kwargs.update(attrs)
m.geo_json(**kwargs)
colored = self.format()
colored._folium_map = m
return colored |
def render_update(self, value, system, common_kw):
""" Render response for view `update` method (item PATCH) """
kw = self._get_create_update_kwargs(value, common_kw)
return JHTTPOk('Updated', **kw) | def function[render_update, parameter[self, value, system, common_kw]]:
constant[ Render response for view `update` method (item PATCH) ]
variable[kw] assign[=] call[name[self]._get_create_update_kwargs, parameter[name[value], name[common_kw]]]
return[call[name[JHTTPOk], parameter[constant[Updated]]]] | keyword[def] identifier[render_update] ( identifier[self] , identifier[value] , identifier[system] , identifier[common_kw] ):
literal[string]
identifier[kw] = identifier[self] . identifier[_get_create_update_kwargs] ( identifier[value] , identifier[common_kw] )
keyword[return] identifier[JHTTPOk] ( literal[string] ,** identifier[kw] ) | def render_update(self, value, system, common_kw):
""" Render response for view `update` method (item PATCH) """
kw = self._get_create_update_kwargs(value, common_kw)
return JHTTPOk('Updated', **kw) |
def _write_angle_information(gsd_file, structure):
"""Write the angles in the system.
Parameters
----------
gsd_file :
The file object of the GSD file being written
structure : parmed.Structure
Parmed structure object holding system information
"""
gsd_file.angles.N = len(structure.angles)
unique_angle_types = set()
for angle in structure.angles:
t1, t2, t3 = angle.atom1.type, angle.atom2.type, angle.atom3.type
t1, t3 = sorted([t1, t3], key=natural_sort)
angle_type = ('-'.join((t1, t2, t3)))
unique_angle_types.add(angle_type)
unique_angle_types = sorted(list(unique_angle_types), key=natural_sort)
gsd_file.angles.types = unique_angle_types
angle_typeids = []
angle_groups = []
for angle in structure.angles:
t1, t2, t3 = angle.atom1.type, angle.atom2.type, angle.atom3.type
t1, t3 = sorted([t1, t3], key=natural_sort)
angle_type = ('-'.join((t1, t2, t3)))
angle_typeids.append(unique_angle_types.index(angle_type))
angle_groups.append((angle.atom1.idx, angle.atom2.idx,
angle.atom3.idx))
gsd_file.angles.typeid = angle_typeids
gsd_file.angles.group = angle_groups | def function[_write_angle_information, parameter[gsd_file, structure]]:
constant[Write the angles in the system.
Parameters
----------
gsd_file :
The file object of the GSD file being written
structure : parmed.Structure
Parmed structure object holding system information
]
name[gsd_file].angles.N assign[=] call[name[len], parameter[name[structure].angles]]
variable[unique_angle_types] assign[=] call[name[set], parameter[]]
for taget[name[angle]] in starred[name[structure].angles] begin[:]
<ast.Tuple object at 0x7da1b20bf550> assign[=] tuple[[<ast.Attribute object at 0x7da1b20bc820>, <ast.Attribute object at 0x7da1b20bf400>, <ast.Attribute object at 0x7da1b20bf160>]]
<ast.Tuple object at 0x7da1b20bc8e0> assign[=] call[name[sorted], parameter[list[[<ast.Name object at 0x7da1b20bd6f0>, <ast.Name object at 0x7da1b20bd270>]]]]
variable[angle_type] assign[=] call[constant[-].join, parameter[tuple[[<ast.Name object at 0x7da1b20bfa30>, <ast.Name object at 0x7da1b20be410>, <ast.Name object at 0x7da1b20bc670>]]]]
call[name[unique_angle_types].add, parameter[name[angle_type]]]
variable[unique_angle_types] assign[=] call[name[sorted], parameter[call[name[list], parameter[name[unique_angle_types]]]]]
name[gsd_file].angles.types assign[=] name[unique_angle_types]
variable[angle_typeids] assign[=] list[[]]
variable[angle_groups] assign[=] list[[]]
for taget[name[angle]] in starred[name[structure].angles] begin[:]
<ast.Tuple object at 0x7da1b20bebf0> assign[=] tuple[[<ast.Attribute object at 0x7da1b20bd780>, <ast.Attribute object at 0x7da1b20bee90>, <ast.Attribute object at 0x7da1b20bf010>]]
<ast.Tuple object at 0x7da1b20bf7c0> assign[=] call[name[sorted], parameter[list[[<ast.Name object at 0x7da1b20bd210>, <ast.Name object at 0x7da1b20bcdf0>]]]]
variable[angle_type] assign[=] call[constant[-].join, parameter[tuple[[<ast.Name object at 0x7da1b1eeb190>, <ast.Name object at 0x7da1b1eeb040>, <ast.Name object at 0x7da1b1ee9570>]]]]
call[name[angle_typeids].append, parameter[call[name[unique_angle_types].index, parameter[name[angle_type]]]]]
call[name[angle_groups].append, parameter[tuple[[<ast.Attribute object at 0x7da1b1eea140>, <ast.Attribute object at 0x7da1b1eeb0d0>, <ast.Attribute object at 0x7da1b1eea860>]]]]
name[gsd_file].angles.typeid assign[=] name[angle_typeids]
name[gsd_file].angles.group assign[=] name[angle_groups] | keyword[def] identifier[_write_angle_information] ( identifier[gsd_file] , identifier[structure] ):
literal[string]
identifier[gsd_file] . identifier[angles] . identifier[N] = identifier[len] ( identifier[structure] . identifier[angles] )
identifier[unique_angle_types] = identifier[set] ()
keyword[for] identifier[angle] keyword[in] identifier[structure] . identifier[angles] :
identifier[t1] , identifier[t2] , identifier[t3] = identifier[angle] . identifier[atom1] . identifier[type] , identifier[angle] . identifier[atom2] . identifier[type] , identifier[angle] . identifier[atom3] . identifier[type]
identifier[t1] , identifier[t3] = identifier[sorted] ([ identifier[t1] , identifier[t3] ], identifier[key] = identifier[natural_sort] )
identifier[angle_type] =( literal[string] . identifier[join] (( identifier[t1] , identifier[t2] , identifier[t3] )))
identifier[unique_angle_types] . identifier[add] ( identifier[angle_type] )
identifier[unique_angle_types] = identifier[sorted] ( identifier[list] ( identifier[unique_angle_types] ), identifier[key] = identifier[natural_sort] )
identifier[gsd_file] . identifier[angles] . identifier[types] = identifier[unique_angle_types]
identifier[angle_typeids] =[]
identifier[angle_groups] =[]
keyword[for] identifier[angle] keyword[in] identifier[structure] . identifier[angles] :
identifier[t1] , identifier[t2] , identifier[t3] = identifier[angle] . identifier[atom1] . identifier[type] , identifier[angle] . identifier[atom2] . identifier[type] , identifier[angle] . identifier[atom3] . identifier[type]
identifier[t1] , identifier[t3] = identifier[sorted] ([ identifier[t1] , identifier[t3] ], identifier[key] = identifier[natural_sort] )
identifier[angle_type] =( literal[string] . identifier[join] (( identifier[t1] , identifier[t2] , identifier[t3] )))
identifier[angle_typeids] . identifier[append] ( identifier[unique_angle_types] . identifier[index] ( identifier[angle_type] ))
identifier[angle_groups] . identifier[append] (( identifier[angle] . identifier[atom1] . identifier[idx] , identifier[angle] . identifier[atom2] . identifier[idx] ,
identifier[angle] . identifier[atom3] . identifier[idx] ))
identifier[gsd_file] . identifier[angles] . identifier[typeid] = identifier[angle_typeids]
identifier[gsd_file] . identifier[angles] . identifier[group] = identifier[angle_groups] | def _write_angle_information(gsd_file, structure):
"""Write the angles in the system.
Parameters
----------
gsd_file :
The file object of the GSD file being written
structure : parmed.Structure
Parmed structure object holding system information
"""
gsd_file.angles.N = len(structure.angles)
unique_angle_types = set()
for angle in structure.angles:
(t1, t2, t3) = (angle.atom1.type, angle.atom2.type, angle.atom3.type)
(t1, t3) = sorted([t1, t3], key=natural_sort)
angle_type = '-'.join((t1, t2, t3))
unique_angle_types.add(angle_type) # depends on [control=['for'], data=['angle']]
unique_angle_types = sorted(list(unique_angle_types), key=natural_sort)
gsd_file.angles.types = unique_angle_types
angle_typeids = []
angle_groups = []
for angle in structure.angles:
(t1, t2, t3) = (angle.atom1.type, angle.atom2.type, angle.atom3.type)
(t1, t3) = sorted([t1, t3], key=natural_sort)
angle_type = '-'.join((t1, t2, t3))
angle_typeids.append(unique_angle_types.index(angle_type))
angle_groups.append((angle.atom1.idx, angle.atom2.idx, angle.atom3.idx)) # depends on [control=['for'], data=['angle']]
gsd_file.angles.typeid = angle_typeids
gsd_file.angles.group = angle_groups |
def getXML(self):
"""Retrieves the pysvg elements that make up the turtles path and returns them as String in an xml representation.
"""
s = ''
for element in self._svgElements:
s += element.getXML()
return s | def function[getXML, parameter[self]]:
constant[Retrieves the pysvg elements that make up the turtles path and returns them as String in an xml representation.
]
variable[s] assign[=] constant[]
for taget[name[element]] in starred[name[self]._svgElements] begin[:]
<ast.AugAssign object at 0x7da18f811ea0>
return[name[s]] | keyword[def] identifier[getXML] ( identifier[self] ):
literal[string]
identifier[s] = literal[string]
keyword[for] identifier[element] keyword[in] identifier[self] . identifier[_svgElements] :
identifier[s] += identifier[element] . identifier[getXML] ()
keyword[return] identifier[s] | def getXML(self):
"""Retrieves the pysvg elements that make up the turtles path and returns them as String in an xml representation.
"""
s = ''
for element in self._svgElements:
s += element.getXML() # depends on [control=['for'], data=['element']]
return s |
def calc_neg_log_likelihood_and_neg_gradient(self, params):
"""
Calculates and returns the negative of the log-likelihood and the
negative of the gradient. This function is used as the objective
function in scipy.optimize.minimize.
"""
neg_log_likelihood = -1 * self.convenience_calc_log_likelihood(params)
neg_gradient = -1 * self.convenience_calc_gradient(params)
if self.constrained_pos is not None:
neg_gradient[self.constrained_pos] = 0
return neg_log_likelihood, neg_gradient | def function[calc_neg_log_likelihood_and_neg_gradient, parameter[self, params]]:
constant[
Calculates and returns the negative of the log-likelihood and the
negative of the gradient. This function is used as the objective
function in scipy.optimize.minimize.
]
variable[neg_log_likelihood] assign[=] binary_operation[<ast.UnaryOp object at 0x7da20c993a30> * call[name[self].convenience_calc_log_likelihood, parameter[name[params]]]]
variable[neg_gradient] assign[=] binary_operation[<ast.UnaryOp object at 0x7da20c992d70> * call[name[self].convenience_calc_gradient, parameter[name[params]]]]
if compare[name[self].constrained_pos is_not constant[None]] begin[:]
call[name[neg_gradient]][name[self].constrained_pos] assign[=] constant[0]
return[tuple[[<ast.Name object at 0x7da20c993400>, <ast.Name object at 0x7da20c992380>]]] | keyword[def] identifier[calc_neg_log_likelihood_and_neg_gradient] ( identifier[self] , identifier[params] ):
literal[string]
identifier[neg_log_likelihood] =- literal[int] * identifier[self] . identifier[convenience_calc_log_likelihood] ( identifier[params] )
identifier[neg_gradient] =- literal[int] * identifier[self] . identifier[convenience_calc_gradient] ( identifier[params] )
keyword[if] identifier[self] . identifier[constrained_pos] keyword[is] keyword[not] keyword[None] :
identifier[neg_gradient] [ identifier[self] . identifier[constrained_pos] ]= literal[int]
keyword[return] identifier[neg_log_likelihood] , identifier[neg_gradient] | def calc_neg_log_likelihood_and_neg_gradient(self, params):
"""
Calculates and returns the negative of the log-likelihood and the
negative of the gradient. This function is used as the objective
function in scipy.optimize.minimize.
"""
neg_log_likelihood = -1 * self.convenience_calc_log_likelihood(params)
neg_gradient = -1 * self.convenience_calc_gradient(params)
if self.constrained_pos is not None:
neg_gradient[self.constrained_pos] = 0 # depends on [control=['if'], data=[]]
return (neg_log_likelihood, neg_gradient) |
def add_edge(self, u, v, **kwargs):
"""
Add an edge between variable_node and factor_node.
Parameters
----------
u, v: nodes
Nodes can be any hashable Python object.
Examples
--------
>>> from pgmpy.models import FactorGraph
>>> G = FactorGraph()
>>> G.add_nodes_from(['a', 'b', 'c'])
>>> phi1 = DiscreteFactor(['a', 'b'], [2, 2], np.random.rand(4))
>>> G.add_nodes_from([phi1, phi2])
>>> G.add_edge('a', phi1)
"""
if u != v:
super(FactorGraph, self).add_edge(u, v, **kwargs)
else:
raise ValueError('Self loops are not allowed') | def function[add_edge, parameter[self, u, v]]:
constant[
Add an edge between variable_node and factor_node.
Parameters
----------
u, v: nodes
Nodes can be any hashable Python object.
Examples
--------
>>> from pgmpy.models import FactorGraph
>>> G = FactorGraph()
>>> G.add_nodes_from(['a', 'b', 'c'])
>>> phi1 = DiscreteFactor(['a', 'b'], [2, 2], np.random.rand(4))
>>> G.add_nodes_from([phi1, phi2])
>>> G.add_edge('a', phi1)
]
if compare[name[u] not_equal[!=] name[v]] begin[:]
call[call[name[super], parameter[name[FactorGraph], name[self]]].add_edge, parameter[name[u], name[v]]] | keyword[def] identifier[add_edge] ( identifier[self] , identifier[u] , identifier[v] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[u] != identifier[v] :
identifier[super] ( identifier[FactorGraph] , identifier[self] ). identifier[add_edge] ( identifier[u] , identifier[v] ,** identifier[kwargs] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def add_edge(self, u, v, **kwargs):
"""
Add an edge between variable_node and factor_node.
Parameters
----------
u, v: nodes
Nodes can be any hashable Python object.
Examples
--------
>>> from pgmpy.models import FactorGraph
>>> G = FactorGraph()
>>> G.add_nodes_from(['a', 'b', 'c'])
>>> phi1 = DiscreteFactor(['a', 'b'], [2, 2], np.random.rand(4))
>>> G.add_nodes_from([phi1, phi2])
>>> G.add_edge('a', phi1)
"""
if u != v:
super(FactorGraph, self).add_edge(u, v, **kwargs) # depends on [control=['if'], data=['u', 'v']]
else:
raise ValueError('Self loops are not allowed') |
def name(self, name: str):
""" Name Setter
Set name with passed in variable.
@param name: New name string.
@type name: String
"""
self.pathName = os.path.join(self.path, name) | def function[name, parameter[self, name]]:
constant[ Name Setter
Set name with passed in variable.
@param name: New name string.
@type name: String
]
name[self].pathName assign[=] call[name[os].path.join, parameter[name[self].path, name[name]]] | keyword[def] identifier[name] ( identifier[self] , identifier[name] : identifier[str] ):
literal[string]
identifier[self] . identifier[pathName] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[path] , identifier[name] ) | def name(self, name: str):
""" Name Setter
Set name with passed in variable.
@param name: New name string.
@type name: String
"""
self.pathName = os.path.join(self.path, name) |
def appendhdf5(table, source, where=None, name=None):
"""
As :func:`petl.io.hdf5.tohdf5` but don't truncate the target table before
loading.
"""
with _get_hdf5_table(source, where, name, mode='a') as h5table:
# load the data
_insert(table, h5table) | def function[appendhdf5, parameter[table, source, where, name]]:
constant[
As :func:`petl.io.hdf5.tohdf5` but don't truncate the target table before
loading.
]
with call[name[_get_hdf5_table], parameter[name[source], name[where], name[name]]] begin[:]
call[name[_insert], parameter[name[table], name[h5table]]] | keyword[def] identifier[appendhdf5] ( identifier[table] , identifier[source] , identifier[where] = keyword[None] , identifier[name] = keyword[None] ):
literal[string]
keyword[with] identifier[_get_hdf5_table] ( identifier[source] , identifier[where] , identifier[name] , identifier[mode] = literal[string] ) keyword[as] identifier[h5table] :
identifier[_insert] ( identifier[table] , identifier[h5table] ) | def appendhdf5(table, source, where=None, name=None):
"""
As :func:`petl.io.hdf5.tohdf5` but don't truncate the target table before
loading.
"""
with _get_hdf5_table(source, where, name, mode='a') as h5table:
# load the data
_insert(table, h5table) # depends on [control=['with'], data=['h5table']] |
def pformat(arg, width=79, height=24, compact=True):
"""Return pretty formatted representation of object as string.
Whitespace might be altered.
"""
if height is None or height < 1:
height = 1024
if width is None or width < 1:
width = 256
npopt = numpy.get_printoptions()
numpy.set_printoptions(threshold=100, linewidth=width)
if isinstance(arg, basestring):
if arg[:5].lower() in ('<?xml', b'<?xml'):
if isinstance(arg, bytes):
arg = bytes2str(arg)
if height == 1:
arg = arg[:4*width]
else:
arg = pformat_xml(arg)
elif isinstance(arg, bytes):
if isprintable(arg):
arg = bytes2str(arg)
arg = clean_whitespace(arg)
else:
numpy.set_printoptions(**npopt)
return hexdump(arg, width=width, height=height, modulo=1)
arg = arg.rstrip()
elif isinstance(arg, numpy.record):
arg = arg.pprint()
else:
import pprint # delayed import
compact = {} if sys.version_info[0] == 2 else dict(compact=compact)
arg = pprint.pformat(arg, width=width, **compact)
numpy.set_printoptions(**npopt)
if height == 1:
arg = clean_whitespace(arg, compact=True)
return arg[:width]
argl = list(arg.splitlines())
if len(argl) > height:
arg = '\n'.join(argl[:height//2] + ['...'] + argl[-height//2:])
return arg | def function[pformat, parameter[arg, width, height, compact]]:
constant[Return pretty formatted representation of object as string.
Whitespace might be altered.
]
if <ast.BoolOp object at 0x7da20e9b06d0> begin[:]
variable[height] assign[=] constant[1024]
if <ast.BoolOp object at 0x7da20e9b15d0> begin[:]
variable[width] assign[=] constant[256]
variable[npopt] assign[=] call[name[numpy].get_printoptions, parameter[]]
call[name[numpy].set_printoptions, parameter[]]
if call[name[isinstance], parameter[name[arg], name[basestring]]] begin[:]
if compare[call[call[name[arg]][<ast.Slice object at 0x7da20e9b2590>].lower, parameter[]] in tuple[[<ast.Constant object at 0x7da20e9b0a60>, <ast.Constant object at 0x7da20e9b1450>]]] begin[:]
if call[name[isinstance], parameter[name[arg], name[bytes]]] begin[:]
variable[arg] assign[=] call[name[bytes2str], parameter[name[arg]]]
if compare[name[height] equal[==] constant[1]] begin[:]
variable[arg] assign[=] call[name[arg]][<ast.Slice object at 0x7da20e9b2260>]
variable[arg] assign[=] call[name[arg].rstrip, parameter[]]
call[name[numpy].set_printoptions, parameter[]]
if compare[name[height] equal[==] constant[1]] begin[:]
variable[arg] assign[=] call[name[clean_whitespace], parameter[name[arg]]]
return[call[name[arg]][<ast.Slice object at 0x7da1b198f190>]]
variable[argl] assign[=] call[name[list], parameter[call[name[arg].splitlines, parameter[]]]]
if compare[call[name[len], parameter[name[argl]]] greater[>] name[height]] begin[:]
variable[arg] assign[=] call[constant[
].join, parameter[binary_operation[binary_operation[call[name[argl]][<ast.Slice object at 0x7da1b198e4a0>] + list[[<ast.Constant object at 0x7da1b198e1a0>]]] + call[name[argl]][<ast.Slice object at 0x7da1b198e560>]]]]
return[name[arg]] | keyword[def] identifier[pformat] ( identifier[arg] , identifier[width] = literal[int] , identifier[height] = literal[int] , identifier[compact] = keyword[True] ):
literal[string]
keyword[if] identifier[height] keyword[is] keyword[None] keyword[or] identifier[height] < literal[int] :
identifier[height] = literal[int]
keyword[if] identifier[width] keyword[is] keyword[None] keyword[or] identifier[width] < literal[int] :
identifier[width] = literal[int]
identifier[npopt] = identifier[numpy] . identifier[get_printoptions] ()
identifier[numpy] . identifier[set_printoptions] ( identifier[threshold] = literal[int] , identifier[linewidth] = identifier[width] )
keyword[if] identifier[isinstance] ( identifier[arg] , identifier[basestring] ):
keyword[if] identifier[arg] [: literal[int] ]. identifier[lower] () keyword[in] ( literal[string] , literal[string] ):
keyword[if] identifier[isinstance] ( identifier[arg] , identifier[bytes] ):
identifier[arg] = identifier[bytes2str] ( identifier[arg] )
keyword[if] identifier[height] == literal[int] :
identifier[arg] = identifier[arg] [: literal[int] * identifier[width] ]
keyword[else] :
identifier[arg] = identifier[pformat_xml] ( identifier[arg] )
keyword[elif] identifier[isinstance] ( identifier[arg] , identifier[bytes] ):
keyword[if] identifier[isprintable] ( identifier[arg] ):
identifier[arg] = identifier[bytes2str] ( identifier[arg] )
identifier[arg] = identifier[clean_whitespace] ( identifier[arg] )
keyword[else] :
identifier[numpy] . identifier[set_printoptions] (** identifier[npopt] )
keyword[return] identifier[hexdump] ( identifier[arg] , identifier[width] = identifier[width] , identifier[height] = identifier[height] , identifier[modulo] = literal[int] )
identifier[arg] = identifier[arg] . identifier[rstrip] ()
keyword[elif] identifier[isinstance] ( identifier[arg] , identifier[numpy] . identifier[record] ):
identifier[arg] = identifier[arg] . identifier[pprint] ()
keyword[else] :
keyword[import] identifier[pprint]
identifier[compact] ={} keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]== literal[int] keyword[else] identifier[dict] ( identifier[compact] = identifier[compact] )
identifier[arg] = identifier[pprint] . identifier[pformat] ( identifier[arg] , identifier[width] = identifier[width] ,** identifier[compact] )
identifier[numpy] . identifier[set_printoptions] (** identifier[npopt] )
keyword[if] identifier[height] == literal[int] :
identifier[arg] = identifier[clean_whitespace] ( identifier[arg] , identifier[compact] = keyword[True] )
keyword[return] identifier[arg] [: identifier[width] ]
identifier[argl] = identifier[list] ( identifier[arg] . identifier[splitlines] ())
keyword[if] identifier[len] ( identifier[argl] )> identifier[height] :
identifier[arg] = literal[string] . identifier[join] ( identifier[argl] [: identifier[height] // literal[int] ]+[ literal[string] ]+ identifier[argl] [- identifier[height] // literal[int] :])
keyword[return] identifier[arg] | def pformat(arg, width=79, height=24, compact=True):
"""Return pretty formatted representation of object as string.
Whitespace might be altered.
"""
if height is None or height < 1:
height = 1024 # depends on [control=['if'], data=[]]
if width is None or width < 1:
width = 256 # depends on [control=['if'], data=[]]
npopt = numpy.get_printoptions()
numpy.set_printoptions(threshold=100, linewidth=width)
if isinstance(arg, basestring):
if arg[:5].lower() in ('<?xml', b'<?xml'):
if isinstance(arg, bytes):
arg = bytes2str(arg) # depends on [control=['if'], data=[]]
if height == 1:
arg = arg[:4 * width] # depends on [control=['if'], data=[]]
else:
arg = pformat_xml(arg) # depends on [control=['if'], data=[]]
elif isinstance(arg, bytes):
if isprintable(arg):
arg = bytes2str(arg)
arg = clean_whitespace(arg) # depends on [control=['if'], data=[]]
else:
numpy.set_printoptions(**npopt)
return hexdump(arg, width=width, height=height, modulo=1) # depends on [control=['if'], data=[]]
arg = arg.rstrip() # depends on [control=['if'], data=[]]
elif isinstance(arg, numpy.record):
arg = arg.pprint() # depends on [control=['if'], data=[]]
else:
import pprint # delayed import
compact = {} if sys.version_info[0] == 2 else dict(compact=compact)
arg = pprint.pformat(arg, width=width, **compact)
numpy.set_printoptions(**npopt)
if height == 1:
arg = clean_whitespace(arg, compact=True)
return arg[:width] # depends on [control=['if'], data=[]]
argl = list(arg.splitlines())
if len(argl) > height:
arg = '\n'.join(argl[:height // 2] + ['...'] + argl[-height // 2:]) # depends on [control=['if'], data=['height']]
return arg |
def get_after(self, timestamp, s=None):
"""
Find all the (available) logs that are after the given time stamp.
If `s` is not supplied, then all lines are used. Otherwise, only the
lines contain the `s` are used. `s` can be either a single string or a
strings list. For list, all keywords in the list must be found in the
line.
.. note::
The time stamp is time type instead of usual datetime type. If
a time stamp is not found on the line between square brackets, then
it is treated as a continuation of the previous line and is only
included if the previous line's timestamp is greater than the
timestamp given. Because continuation lines are only included if a
previous line has matched, this means that searching in logs that do
not have a time stamp produces no lines.
Parameters:
timestamp(time): log lines after this time are returned.
s(str or list): one or more strings to search for.
If not supplied, all available lines are searched.
Yields:
Log lines with time stamps after the given time.
Raises:
TypeError: The ``timestamp`` should be in `time` type, otherwise a
`TypeError` will be raised.
"""
if not isinstance(timestamp, time):
raise TypeError(
"get_after needs a time type timestamp, but get '{c}'".format(
c=timestamp)
)
including_lines = False
search_by_expression = self._valid_search(s)
for line in self.lines:
# If `s` is not None, keywords must be found in the line
if s and not search_by_expression(line):
continue
# Otherwise, search all lines
match = self._line_re.search(line)
if match and match.group('timestamp'):
# Get logtimestamp and compare to given timestamp
l_hh, l_mm, l_ss = match.group('timestamp').split(":")
logstamp = time(int(l_hh), int(l_mm), int(l_ss))
if logstamp >= timestamp:
including_lines = True
yield self._parse_line(line)
else:
including_lines = False
else:
# If we're including lines, add this continuation line
if including_lines:
yield self._parse_line(line) | def function[get_after, parameter[self, timestamp, s]]:
constant[
Find all the (available) logs that are after the given time stamp.
If `s` is not supplied, then all lines are used. Otherwise, only the
lines contain the `s` are used. `s` can be either a single string or a
strings list. For list, all keywords in the list must be found in the
line.
.. note::
The time stamp is time type instead of usual datetime type. If
a time stamp is not found on the line between square brackets, then
it is treated as a continuation of the previous line and is only
included if the previous line's timestamp is greater than the
timestamp given. Because continuation lines are only included if a
previous line has matched, this means that searching in logs that do
not have a time stamp produces no lines.
Parameters:
timestamp(time): log lines after this time are returned.
s(str or list): one or more strings to search for.
If not supplied, all available lines are searched.
Yields:
Log lines with time stamps after the given time.
Raises:
TypeError: The ``timestamp`` should be in `time` type, otherwise a
`TypeError` will be raised.
]
if <ast.UnaryOp object at 0x7da18f813f40> begin[:]
<ast.Raise object at 0x7da18f813220>
variable[including_lines] assign[=] constant[False]
variable[search_by_expression] assign[=] call[name[self]._valid_search, parameter[name[s]]]
for taget[name[line]] in starred[name[self].lines] begin[:]
if <ast.BoolOp object at 0x7da18f8134f0> begin[:]
continue
variable[match] assign[=] call[name[self]._line_re.search, parameter[name[line]]]
if <ast.BoolOp object at 0x7da18f8122c0> begin[:]
<ast.Tuple object at 0x7da18f811720> assign[=] call[call[name[match].group, parameter[constant[timestamp]]].split, parameter[constant[:]]]
variable[logstamp] assign[=] call[name[time], parameter[call[name[int], parameter[name[l_hh]]], call[name[int], parameter[name[l_mm]]], call[name[int], parameter[name[l_ss]]]]]
if compare[name[logstamp] greater_or_equal[>=] name[timestamp]] begin[:]
variable[including_lines] assign[=] constant[True]
<ast.Yield object at 0x7da18f8131c0> | keyword[def] identifier[get_after] ( identifier[self] , identifier[timestamp] , identifier[s] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[timestamp] , identifier[time] ):
keyword[raise] identifier[TypeError] (
literal[string] . identifier[format] (
identifier[c] = identifier[timestamp] )
)
identifier[including_lines] = keyword[False]
identifier[search_by_expression] = identifier[self] . identifier[_valid_search] ( identifier[s] )
keyword[for] identifier[line] keyword[in] identifier[self] . identifier[lines] :
keyword[if] identifier[s] keyword[and] keyword[not] identifier[search_by_expression] ( identifier[line] ):
keyword[continue]
identifier[match] = identifier[self] . identifier[_line_re] . identifier[search] ( identifier[line] )
keyword[if] identifier[match] keyword[and] identifier[match] . identifier[group] ( literal[string] ):
identifier[l_hh] , identifier[l_mm] , identifier[l_ss] = identifier[match] . identifier[group] ( literal[string] ). identifier[split] ( literal[string] )
identifier[logstamp] = identifier[time] ( identifier[int] ( identifier[l_hh] ), identifier[int] ( identifier[l_mm] ), identifier[int] ( identifier[l_ss] ))
keyword[if] identifier[logstamp] >= identifier[timestamp] :
identifier[including_lines] = keyword[True]
keyword[yield] identifier[self] . identifier[_parse_line] ( identifier[line] )
keyword[else] :
identifier[including_lines] = keyword[False]
keyword[else] :
keyword[if] identifier[including_lines] :
keyword[yield] identifier[self] . identifier[_parse_line] ( identifier[line] ) | def get_after(self, timestamp, s=None):
"""
Find all the (available) logs that are after the given time stamp.
If `s` is not supplied, then all lines are used. Otherwise, only the
lines contain the `s` are used. `s` can be either a single string or a
strings list. For list, all keywords in the list must be found in the
line.
.. note::
The time stamp is time type instead of usual datetime type. If
a time stamp is not found on the line between square brackets, then
it is treated as a continuation of the previous line and is only
included if the previous line's timestamp is greater than the
timestamp given. Because continuation lines are only included if a
previous line has matched, this means that searching in logs that do
not have a time stamp produces no lines.
Parameters:
timestamp(time): log lines after this time are returned.
s(str or list): one or more strings to search for.
If not supplied, all available lines are searched.
Yields:
Log lines with time stamps after the given time.
Raises:
TypeError: The ``timestamp`` should be in `time` type, otherwise a
`TypeError` will be raised.
"""
if not isinstance(timestamp, time):
raise TypeError("get_after needs a time type timestamp, but get '{c}'".format(c=timestamp)) # depends on [control=['if'], data=[]]
including_lines = False
search_by_expression = self._valid_search(s)
for line in self.lines:
# If `s` is not None, keywords must be found in the line
if s and (not search_by_expression(line)):
continue # depends on [control=['if'], data=[]]
# Otherwise, search all lines
match = self._line_re.search(line)
if match and match.group('timestamp'):
# Get logtimestamp and compare to given timestamp
(l_hh, l_mm, l_ss) = match.group('timestamp').split(':')
logstamp = time(int(l_hh), int(l_mm), int(l_ss))
if logstamp >= timestamp:
including_lines = True
yield self._parse_line(line) # depends on [control=['if'], data=[]]
else:
including_lines = False # depends on [control=['if'], data=[]]
# If we're including lines, add this continuation line
elif including_lines:
yield self._parse_line(line) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] |
def plot_line_loading_diff(networkA, networkB, timestep=0):
"""
Plot difference in line loading between two networks
(with and without switches) as color on lines
Positive values mean that line loading with switches is bigger than without
Plot switches as small dots
Parameters
----------
networkA : PyPSA network container
Holds topology of grid with switches
including results from powerflow analysis
networkB : PyPSA network container
Holds topology of grid without switches
including results from powerflow analysis
filename : str
Specify filename
If not given, figure will be show directly
timestep : int
timestep to show, default is 0
"""
# new colormap to make sure 0% difference has the same color in every plot
def shiftedColorMap(
cmap,
start=0,
midpoint=0.5,
stop=1.0,
name='shiftedcmap'):
'''
Function to offset the "center" of a colormap. Useful for
data with a negative min and positive max and you want the
middle of the colormap's dynamic range to be at zero
Input
-----
cmap : The matplotlib colormap to be altered
start : Offset from lowest point in the colormap's range.
Defaults to 0.0 (no lower ofset). Should be between
0.0 and `midpoint`.
midpoint : The new center of the colormap. Defaults to
0.5 (no shift). Should be between 0.0 and 1.0. In
general, this should be 1 - vmax/(vmax + abs(vmin))
For example if your data range from -15.0 to +5.0 and
you want the center of the colormap at 0.0, `midpoint`
should be set to 1 - 5/(5 + 15)) or 0.75
stop : Offset from highets point in the colormap's range.
Defaults to 1.0 (no upper ofset). Should be between
`midpoint` and 1.0.
'''
cdict = {
'red': [],
'green': [],
'blue': [],
'alpha': []
}
# regular index to compute the colors
reg_index = np.linspace(start, stop, 257)
# shifted index to match the data
shift_index = np.hstack([
np.linspace(0.0, midpoint, 128, endpoint=False),
np.linspace(midpoint, 1.0, 129, endpoint=True)
])
for ri, si in zip(reg_index, shift_index):
r, g, b, a = cmap(ri)
cdict['red'].append((si, r, r))
cdict['green'].append((si, g, g))
cdict['blue'].append((si, b, b))
cdict['alpha'].append((si, a, a))
newcmap = matplotlib.colors.LinearSegmentedColormap(name, cdict)
plt.register_cmap(cmap=newcmap)
return newcmap
# calculate difference in loading between both networks
loading_switches = abs(
networkA.lines_t.p0.mul(networkA.snapshot_weightings, axis=0).\
loc[networkA.snapshots[timestep]].to_frame())
loading_switches.columns = ['switch']
loading_noswitches = abs(
networkB.lines_t.p0.mul(networkB.snapshot_weightings, axis=0).\
loc[networkB.snapshots[timestep]].to_frame())
loading_noswitches.columns = ['noswitch']
diff_network = loading_switches.join(loading_noswitches)
diff_network['noswitch'] = diff_network['noswitch'].fillna(
diff_network['switch'])
diff_network[networkA.snapshots[timestep]] \
= diff_network['switch'] - diff_network['noswitch']
# get switches
new_buses = pd.Series(index=networkA.buses.index.values)
new_buses.loc[set(networkA.buses.index.values) -
set(networkB.buses.index.values)] = 0.1
new_buses = new_buses.fillna(0)
# plot network with difference in loading and shifted colormap
loading = (diff_network.loc[:, networkA.snapshots[timestep]] /
(networkA.lines.s_nom)) * 100
midpoint = 1 - max(loading) / (max(loading) + abs(min(loading)))
shifted_cmap = shiftedColorMap(
plt.cm.jet, midpoint=midpoint, name='shifted')
ll = networkA.plot(line_colors=loading, line_cmap=shifted_cmap,
title="Line loading", bus_sizes=new_buses,
bus_colors='blue', line_widths=0.55)
cb = plt.colorbar(ll[1])
cb.set_label('Difference in line loading in % of s_nom') | def function[plot_line_loading_diff, parameter[networkA, networkB, timestep]]:
constant[
Plot difference in line loading between two networks
(with and without switches) as color on lines
Positive values mean that line loading with switches is bigger than without
Plot switches as small dots
Parameters
----------
networkA : PyPSA network container
Holds topology of grid with switches
including results from powerflow analysis
networkB : PyPSA network container
Holds topology of grid without switches
including results from powerflow analysis
filename : str
Specify filename
If not given, figure will be show directly
timestep : int
timestep to show, default is 0
]
def function[shiftedColorMap, parameter[cmap, start, midpoint, stop, name]]:
constant[
Function to offset the "center" of a colormap. Useful for
data with a negative min and positive max and you want the
middle of the colormap's dynamic range to be at zero
Input
-----
cmap : The matplotlib colormap to be altered
start : Offset from lowest point in the colormap's range.
Defaults to 0.0 (no lower ofset). Should be between
0.0 and `midpoint`.
midpoint : The new center of the colormap. Defaults to
0.5 (no shift). Should be between 0.0 and 1.0. In
general, this should be 1 - vmax/(vmax + abs(vmin))
For example if your data range from -15.0 to +5.0 and
you want the center of the colormap at 0.0, `midpoint`
should be set to 1 - 5/(5 + 15)) or 0.75
stop : Offset from highets point in the colormap's range.
Defaults to 1.0 (no upper ofset). Should be between
`midpoint` and 1.0.
]
variable[cdict] assign[=] dictionary[[<ast.Constant object at 0x7da2044c3490>, <ast.Constant object at 0x7da2044c34c0>, <ast.Constant object at 0x7da2044c2aa0>, <ast.Constant object at 0x7da2044c0340>], [<ast.List object at 0x7da2044c1600>, <ast.List object at 0x7da2044c14e0>, <ast.List object at 0x7da2044c3ca0>, <ast.List object at 0x7da2044c2980>]]
variable[reg_index] assign[=] call[name[np].linspace, parameter[name[start], name[stop], constant[257]]]
variable[shift_index] assign[=] call[name[np].hstack, parameter[list[[<ast.Call object at 0x7da2044c1300>, <ast.Call object at 0x7da2044c1690>]]]]
for taget[tuple[[<ast.Name object at 0x7da2044c0040>, <ast.Name object at 0x7da2044c3f10>]]] in starred[call[name[zip], parameter[name[reg_index], name[shift_index]]]] begin[:]
<ast.Tuple object at 0x7da2044c0550> assign[=] call[name[cmap], parameter[name[ri]]]
call[call[name[cdict]][constant[red]].append, parameter[tuple[[<ast.Name object at 0x7da2044c28c0>, <ast.Name object at 0x7da2044c1bd0>, <ast.Name object at 0x7da2044c3400>]]]]
call[call[name[cdict]][constant[green]].append, parameter[tuple[[<ast.Name object at 0x7da2044c1ab0>, <ast.Name object at 0x7da2044c1e70>, <ast.Name object at 0x7da2044c1b10>]]]]
call[call[name[cdict]][constant[blue]].append, parameter[tuple[[<ast.Name object at 0x7da2044c10f0>, <ast.Name object at 0x7da2044c1960>, <ast.Name object at 0x7da2044c0fd0>]]]]
call[call[name[cdict]][constant[alpha]].append, parameter[tuple[[<ast.Name object at 0x7da2044c16f0>, <ast.Name object at 0x7da2044c2320>, <ast.Name object at 0x7da2044c3940>]]]]
variable[newcmap] assign[=] call[name[matplotlib].colors.LinearSegmentedColormap, parameter[name[name], name[cdict]]]
call[name[plt].register_cmap, parameter[]]
return[name[newcmap]]
variable[loading_switches] assign[=] call[name[abs], parameter[call[call[call[name[networkA].lines_t.p0.mul, parameter[name[networkA].snapshot_weightings]].loc][call[name[networkA].snapshots][name[timestep]]].to_frame, parameter[]]]]
name[loading_switches].columns assign[=] list[[<ast.Constant object at 0x7da2044c2230>]]
variable[loading_noswitches] assign[=] call[name[abs], parameter[call[call[call[name[networkB].lines_t.p0.mul, parameter[name[networkB].snapshot_weightings]].loc][call[name[networkB].snapshots][name[timestep]]].to_frame, parameter[]]]]
name[loading_noswitches].columns assign[=] list[[<ast.Constant object at 0x7da2044c1990>]]
variable[diff_network] assign[=] call[name[loading_switches].join, parameter[name[loading_noswitches]]]
call[name[diff_network]][constant[noswitch]] assign[=] call[call[name[diff_network]][constant[noswitch]].fillna, parameter[call[name[diff_network]][constant[switch]]]]
call[name[diff_network]][call[name[networkA].snapshots][name[timestep]]] assign[=] binary_operation[call[name[diff_network]][constant[switch]] - call[name[diff_network]][constant[noswitch]]]
variable[new_buses] assign[=] call[name[pd].Series, parameter[]]
call[name[new_buses].loc][binary_operation[call[name[set], parameter[name[networkA].buses.index.values]] - call[name[set], parameter[name[networkB].buses.index.values]]]] assign[=] constant[0.1]
variable[new_buses] assign[=] call[name[new_buses].fillna, parameter[constant[0]]]
variable[loading] assign[=] binary_operation[binary_operation[call[name[diff_network].loc][tuple[[<ast.Slice object at 0x7da18bc70220>, <ast.Subscript object at 0x7da18bc72500>]]] / name[networkA].lines.s_nom] * constant[100]]
variable[midpoint] assign[=] binary_operation[constant[1] - binary_operation[call[name[max], parameter[name[loading]]] / binary_operation[call[name[max], parameter[name[loading]]] + call[name[abs], parameter[call[name[min], parameter[name[loading]]]]]]]]
variable[shifted_cmap] assign[=] call[name[shiftedColorMap], parameter[name[plt].cm.jet]]
variable[ll] assign[=] call[name[networkA].plot, parameter[]]
variable[cb] assign[=] call[name[plt].colorbar, parameter[call[name[ll]][constant[1]]]]
call[name[cb].set_label, parameter[constant[Difference in line loading in % of s_nom]]] | keyword[def] identifier[plot_line_loading_diff] ( identifier[networkA] , identifier[networkB] , identifier[timestep] = literal[int] ):
literal[string]
keyword[def] identifier[shiftedColorMap] (
identifier[cmap] ,
identifier[start] = literal[int] ,
identifier[midpoint] = literal[int] ,
identifier[stop] = literal[int] ,
identifier[name] = literal[string] ):
literal[string]
identifier[cdict] ={
literal[string] :[],
literal[string] :[],
literal[string] :[],
literal[string] :[]
}
identifier[reg_index] = identifier[np] . identifier[linspace] ( identifier[start] , identifier[stop] , literal[int] )
identifier[shift_index] = identifier[np] . identifier[hstack] ([
identifier[np] . identifier[linspace] ( literal[int] , identifier[midpoint] , literal[int] , identifier[endpoint] = keyword[False] ),
identifier[np] . identifier[linspace] ( identifier[midpoint] , literal[int] , literal[int] , identifier[endpoint] = keyword[True] )
])
keyword[for] identifier[ri] , identifier[si] keyword[in] identifier[zip] ( identifier[reg_index] , identifier[shift_index] ):
identifier[r] , identifier[g] , identifier[b] , identifier[a] = identifier[cmap] ( identifier[ri] )
identifier[cdict] [ literal[string] ]. identifier[append] (( identifier[si] , identifier[r] , identifier[r] ))
identifier[cdict] [ literal[string] ]. identifier[append] (( identifier[si] , identifier[g] , identifier[g] ))
identifier[cdict] [ literal[string] ]. identifier[append] (( identifier[si] , identifier[b] , identifier[b] ))
identifier[cdict] [ literal[string] ]. identifier[append] (( identifier[si] , identifier[a] , identifier[a] ))
identifier[newcmap] = identifier[matplotlib] . identifier[colors] . identifier[LinearSegmentedColormap] ( identifier[name] , identifier[cdict] )
identifier[plt] . identifier[register_cmap] ( identifier[cmap] = identifier[newcmap] )
keyword[return] identifier[newcmap]
identifier[loading_switches] = identifier[abs] (
identifier[networkA] . identifier[lines_t] . identifier[p0] . identifier[mul] ( identifier[networkA] . identifier[snapshot_weightings] , identifier[axis] = literal[int] ). identifier[loc] [ identifier[networkA] . identifier[snapshots] [ identifier[timestep] ]]. identifier[to_frame] ())
identifier[loading_switches] . identifier[columns] =[ literal[string] ]
identifier[loading_noswitches] = identifier[abs] (
identifier[networkB] . identifier[lines_t] . identifier[p0] . identifier[mul] ( identifier[networkB] . identifier[snapshot_weightings] , identifier[axis] = literal[int] ). identifier[loc] [ identifier[networkB] . identifier[snapshots] [ identifier[timestep] ]]. identifier[to_frame] ())
identifier[loading_noswitches] . identifier[columns] =[ literal[string] ]
identifier[diff_network] = identifier[loading_switches] . identifier[join] ( identifier[loading_noswitches] )
identifier[diff_network] [ literal[string] ]= identifier[diff_network] [ literal[string] ]. identifier[fillna] (
identifier[diff_network] [ literal[string] ])
identifier[diff_network] [ identifier[networkA] . identifier[snapshots] [ identifier[timestep] ]]= identifier[diff_network] [ literal[string] ]- identifier[diff_network] [ literal[string] ]
identifier[new_buses] = identifier[pd] . identifier[Series] ( identifier[index] = identifier[networkA] . identifier[buses] . identifier[index] . identifier[values] )
identifier[new_buses] . identifier[loc] [ identifier[set] ( identifier[networkA] . identifier[buses] . identifier[index] . identifier[values] )-
identifier[set] ( identifier[networkB] . identifier[buses] . identifier[index] . identifier[values] )]= literal[int]
identifier[new_buses] = identifier[new_buses] . identifier[fillna] ( literal[int] )
identifier[loading] =( identifier[diff_network] . identifier[loc] [:, identifier[networkA] . identifier[snapshots] [ identifier[timestep] ]]/
( identifier[networkA] . identifier[lines] . identifier[s_nom] ))* literal[int]
identifier[midpoint] = literal[int] - identifier[max] ( identifier[loading] )/( identifier[max] ( identifier[loading] )+ identifier[abs] ( identifier[min] ( identifier[loading] )))
identifier[shifted_cmap] = identifier[shiftedColorMap] (
identifier[plt] . identifier[cm] . identifier[jet] , identifier[midpoint] = identifier[midpoint] , identifier[name] = literal[string] )
identifier[ll] = identifier[networkA] . identifier[plot] ( identifier[line_colors] = identifier[loading] , identifier[line_cmap] = identifier[shifted_cmap] ,
identifier[title] = literal[string] , identifier[bus_sizes] = identifier[new_buses] ,
identifier[bus_colors] = literal[string] , identifier[line_widths] = literal[int] )
identifier[cb] = identifier[plt] . identifier[colorbar] ( identifier[ll] [ literal[int] ])
identifier[cb] . identifier[set_label] ( literal[string] ) | def plot_line_loading_diff(networkA, networkB, timestep=0):
"""
Plot difference in line loading between two networks
(with and without switches) as color on lines
Positive values mean that line loading with switches is bigger than without
Plot switches as small dots
Parameters
----------
networkA : PyPSA network container
Holds topology of grid with switches
including results from powerflow analysis
networkB : PyPSA network container
Holds topology of grid without switches
including results from powerflow analysis
filename : str
Specify filename
If not given, figure will be show directly
timestep : int
timestep to show, default is 0
"""
# new colormap to make sure 0% difference has the same color in every plot
def shiftedColorMap(cmap, start=0, midpoint=0.5, stop=1.0, name='shiftedcmap'):
"""
Function to offset the "center" of a colormap. Useful for
data with a negative min and positive max and you want the
middle of the colormap's dynamic range to be at zero
Input
-----
cmap : The matplotlib colormap to be altered
start : Offset from lowest point in the colormap's range.
Defaults to 0.0 (no lower ofset). Should be between
0.0 and `midpoint`.
midpoint : The new center of the colormap. Defaults to
0.5 (no shift). Should be between 0.0 and 1.0. In
general, this should be 1 - vmax/(vmax + abs(vmin))
For example if your data range from -15.0 to +5.0 and
you want the center of the colormap at 0.0, `midpoint`
should be set to 1 - 5/(5 + 15)) or 0.75
stop : Offset from highets point in the colormap's range.
Defaults to 1.0 (no upper ofset). Should be between
`midpoint` and 1.0.
"""
cdict = {'red': [], 'green': [], 'blue': [], 'alpha': []}
# regular index to compute the colors
reg_index = np.linspace(start, stop, 257)
# shifted index to match the data
shift_index = np.hstack([np.linspace(0.0, midpoint, 128, endpoint=False), np.linspace(midpoint, 1.0, 129, endpoint=True)])
for (ri, si) in zip(reg_index, shift_index):
(r, g, b, a) = cmap(ri)
cdict['red'].append((si, r, r))
cdict['green'].append((si, g, g))
cdict['blue'].append((si, b, b))
cdict['alpha'].append((si, a, a)) # depends on [control=['for'], data=[]]
newcmap = matplotlib.colors.LinearSegmentedColormap(name, cdict)
plt.register_cmap(cmap=newcmap)
return newcmap
# calculate difference in loading between both networks
loading_switches = abs(networkA.lines_t.p0.mul(networkA.snapshot_weightings, axis=0).loc[networkA.snapshots[timestep]].to_frame())
loading_switches.columns = ['switch']
loading_noswitches = abs(networkB.lines_t.p0.mul(networkB.snapshot_weightings, axis=0).loc[networkB.snapshots[timestep]].to_frame())
loading_noswitches.columns = ['noswitch']
diff_network = loading_switches.join(loading_noswitches)
diff_network['noswitch'] = diff_network['noswitch'].fillna(diff_network['switch'])
diff_network[networkA.snapshots[timestep]] = diff_network['switch'] - diff_network['noswitch']
# get switches
new_buses = pd.Series(index=networkA.buses.index.values)
new_buses.loc[set(networkA.buses.index.values) - set(networkB.buses.index.values)] = 0.1
new_buses = new_buses.fillna(0)
# plot network with difference in loading and shifted colormap
loading = diff_network.loc[:, networkA.snapshots[timestep]] / networkA.lines.s_nom * 100
midpoint = 1 - max(loading) / (max(loading) + abs(min(loading)))
shifted_cmap = shiftedColorMap(plt.cm.jet, midpoint=midpoint, name='shifted')
ll = networkA.plot(line_colors=loading, line_cmap=shifted_cmap, title='Line loading', bus_sizes=new_buses, bus_colors='blue', line_widths=0.55)
cb = plt.colorbar(ll[1])
cb.set_label('Difference in line loading in % of s_nom') |
def configure_installed_apps_logger(level, verbose=False,
additional_packages=None, filename=None):
"""Builds and enables a logger with a logger list of the top-level list of
installed app modules (based on package name) plus any additional
application packages passed in - for example, a user may want to log a
dependent package of one the installed apps. The logger will write either
to the console or to a file based on the presence of the filename parameter.
Check that the LOGGING_CONFIG setting is None before we configure the logger
in order to prevent maintaining Django's list of log handlers."""
if settings.LOGGING_CONFIG:
raise ImproperlyConfigured(LOGGING_CONFIG_ERROR_MSG)
app_set = _normalize_apps(settings.INSTALLED_APPS)
# Add any additional app modules to the set of apps we want to configure
if additional_packages:
# Make sure we're dealing with a list of additional apps before we
# convert to a set
if not isinstance(additional_packages, list):
additional_packages = list(additional_packages)
# Update the app set with these additional app modules
app_set.update(set(additional_packages))
config = _build_logging_config(
level, app_set, verbose, filename)
logging.config.dictConfig(config) | def function[configure_installed_apps_logger, parameter[level, verbose, additional_packages, filename]]:
constant[Builds and enables a logger with a logger list of the top-level list of
installed app modules (based on package name) plus any additional
application packages passed in - for example, a user may want to log a
dependent package of one the installed apps. The logger will write either
to the console or to a file based on the presence of the filename parameter.
Check that the LOGGING_CONFIG setting is None before we configure the logger
in order to prevent maintaining Django's list of log handlers.]
if name[settings].LOGGING_CONFIG begin[:]
<ast.Raise object at 0x7da1b168e9b0>
variable[app_set] assign[=] call[name[_normalize_apps], parameter[name[settings].INSTALLED_APPS]]
if name[additional_packages] begin[:]
if <ast.UnaryOp object at 0x7da1b168c550> begin[:]
variable[additional_packages] assign[=] call[name[list], parameter[name[additional_packages]]]
call[name[app_set].update, parameter[call[name[set], parameter[name[additional_packages]]]]]
variable[config] assign[=] call[name[_build_logging_config], parameter[name[level], name[app_set], name[verbose], name[filename]]]
call[name[logging].config.dictConfig, parameter[name[config]]] | keyword[def] identifier[configure_installed_apps_logger] ( identifier[level] , identifier[verbose] = keyword[False] ,
identifier[additional_packages] = keyword[None] , identifier[filename] = keyword[None] ):
literal[string]
keyword[if] identifier[settings] . identifier[LOGGING_CONFIG] :
keyword[raise] identifier[ImproperlyConfigured] ( identifier[LOGGING_CONFIG_ERROR_MSG] )
identifier[app_set] = identifier[_normalize_apps] ( identifier[settings] . identifier[INSTALLED_APPS] )
keyword[if] identifier[additional_packages] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[additional_packages] , identifier[list] ):
identifier[additional_packages] = identifier[list] ( identifier[additional_packages] )
identifier[app_set] . identifier[update] ( identifier[set] ( identifier[additional_packages] ))
identifier[config] = identifier[_build_logging_config] (
identifier[level] , identifier[app_set] , identifier[verbose] , identifier[filename] )
identifier[logging] . identifier[config] . identifier[dictConfig] ( identifier[config] ) | def configure_installed_apps_logger(level, verbose=False, additional_packages=None, filename=None):
"""Builds and enables a logger with a logger list of the top-level list of
installed app modules (based on package name) plus any additional
application packages passed in - for example, a user may want to log a
dependent package of one the installed apps. The logger will write either
to the console or to a file based on the presence of the filename parameter.
Check that the LOGGING_CONFIG setting is None before we configure the logger
in order to prevent maintaining Django's list of log handlers."""
if settings.LOGGING_CONFIG:
raise ImproperlyConfigured(LOGGING_CONFIG_ERROR_MSG) # depends on [control=['if'], data=[]]
app_set = _normalize_apps(settings.INSTALLED_APPS)
# Add any additional app modules to the set of apps we want to configure
if additional_packages:
# Make sure we're dealing with a list of additional apps before we
# convert to a set
if not isinstance(additional_packages, list):
additional_packages = list(additional_packages) # depends on [control=['if'], data=[]]
# Update the app set with these additional app modules
app_set.update(set(additional_packages)) # depends on [control=['if'], data=[]]
config = _build_logging_config(level, app_set, verbose, filename)
logging.config.dictConfig(config) |
def unlock(self, time=3):
"""
unlock the door\n
thanks to https://github.com/SoftwareHouseMerida/pyzk/
:param time: define delay in seconds
:return: bool
"""
command = const.CMD_UNLOCK
command_string = pack("I",int(time)*10)
cmd_response = self.__send_command(command, command_string)
if cmd_response.get('status'):
return True
else:
raise ZKErrorResponse("Can't open door") | def function[unlock, parameter[self, time]]:
constant[
unlock the door
thanks to https://github.com/SoftwareHouseMerida/pyzk/
:param time: define delay in seconds
:return: bool
]
variable[command] assign[=] name[const].CMD_UNLOCK
variable[command_string] assign[=] call[name[pack], parameter[constant[I], binary_operation[call[name[int], parameter[name[time]]] * constant[10]]]]
variable[cmd_response] assign[=] call[name[self].__send_command, parameter[name[command], name[command_string]]]
if call[name[cmd_response].get, parameter[constant[status]]] begin[:]
return[constant[True]] | keyword[def] identifier[unlock] ( identifier[self] , identifier[time] = literal[int] ):
literal[string]
identifier[command] = identifier[const] . identifier[CMD_UNLOCK]
identifier[command_string] = identifier[pack] ( literal[string] , identifier[int] ( identifier[time] )* literal[int] )
identifier[cmd_response] = identifier[self] . identifier[__send_command] ( identifier[command] , identifier[command_string] )
keyword[if] identifier[cmd_response] . identifier[get] ( literal[string] ):
keyword[return] keyword[True]
keyword[else] :
keyword[raise] identifier[ZKErrorResponse] ( literal[string] ) | def unlock(self, time=3):
"""
unlock the door
thanks to https://github.com/SoftwareHouseMerida/pyzk/
:param time: define delay in seconds
:return: bool
"""
command = const.CMD_UNLOCK
command_string = pack('I', int(time) * 10)
cmd_response = self.__send_command(command, command_string)
if cmd_response.get('status'):
return True # depends on [control=['if'], data=[]]
else:
raise ZKErrorResponse("Can't open door") |
def move_left(self):
"""Make the drone move left."""
self.at(ardrone.at.pcmd, True, -self.speed, 0, 0, 0) | def function[move_left, parameter[self]]:
constant[Make the drone move left.]
call[name[self].at, parameter[name[ardrone].at.pcmd, constant[True], <ast.UnaryOp object at 0x7da1b1042740>, constant[0], constant[0], constant[0]]] | keyword[def] identifier[move_left] ( identifier[self] ):
literal[string]
identifier[self] . identifier[at] ( identifier[ardrone] . identifier[at] . identifier[pcmd] , keyword[True] ,- identifier[self] . identifier[speed] , literal[int] , literal[int] , literal[int] ) | def move_left(self):
"""Make the drone move left."""
self.at(ardrone.at.pcmd, True, -self.speed, 0, 0, 0) |
def log_likelihood(z, x, P, H, R):
"""
Returns log-likelihood of the measurement z given the Gaussian
posterior (x, P) using measurement function H and measurement
covariance error R
"""
S = np.dot(H, np.dot(P, H.T)) + R
return logpdf(z, np.dot(H, x), S) | def function[log_likelihood, parameter[z, x, P, H, R]]:
constant[
Returns log-likelihood of the measurement z given the Gaussian
posterior (x, P) using measurement function H and measurement
covariance error R
]
variable[S] assign[=] binary_operation[call[name[np].dot, parameter[name[H], call[name[np].dot, parameter[name[P], name[H].T]]]] + name[R]]
return[call[name[logpdf], parameter[name[z], call[name[np].dot, parameter[name[H], name[x]]], name[S]]]] | keyword[def] identifier[log_likelihood] ( identifier[z] , identifier[x] , identifier[P] , identifier[H] , identifier[R] ):
literal[string]
identifier[S] = identifier[np] . identifier[dot] ( identifier[H] , identifier[np] . identifier[dot] ( identifier[P] , identifier[H] . identifier[T] ))+ identifier[R]
keyword[return] identifier[logpdf] ( identifier[z] , identifier[np] . identifier[dot] ( identifier[H] , identifier[x] ), identifier[S] ) | def log_likelihood(z, x, P, H, R):
"""
Returns log-likelihood of the measurement z given the Gaussian
posterior (x, P) using measurement function H and measurement
covariance error R
"""
S = np.dot(H, np.dot(P, H.T)) + R
return logpdf(z, np.dot(H, x), S) |
def create_spooled_temporary_file(filepath=None, fileobj=None):
"""
Create a spooled temporary file. if ``filepath`` or ``fileobj`` is
defined its content will be copied into temporary file.
:param filepath: Path of input file
:type filepath: str
:param fileobj: Input file object
:type fileobj: file
:returns: Spooled temporary file
:rtype: :class:`tempfile.SpooledTemporaryFile`
"""
spooled_file = tempfile.SpooledTemporaryFile(
max_size=settings.TMP_FILE_MAX_SIZE,
dir=settings.TMP_DIR)
if filepath:
fileobj = open(filepath, 'r+b')
if fileobj is not None:
fileobj.seek(0)
copyfileobj(fileobj, spooled_file, settings.TMP_FILE_READ_SIZE)
return spooled_file | def function[create_spooled_temporary_file, parameter[filepath, fileobj]]:
constant[
Create a spooled temporary file. if ``filepath`` or ``fileobj`` is
defined its content will be copied into temporary file.
:param filepath: Path of input file
:type filepath: str
:param fileobj: Input file object
:type fileobj: file
:returns: Spooled temporary file
:rtype: :class:`tempfile.SpooledTemporaryFile`
]
variable[spooled_file] assign[=] call[name[tempfile].SpooledTemporaryFile, parameter[]]
if name[filepath] begin[:]
variable[fileobj] assign[=] call[name[open], parameter[name[filepath], constant[r+b]]]
if compare[name[fileobj] is_not constant[None]] begin[:]
call[name[fileobj].seek, parameter[constant[0]]]
call[name[copyfileobj], parameter[name[fileobj], name[spooled_file], name[settings].TMP_FILE_READ_SIZE]]
return[name[spooled_file]] | keyword[def] identifier[create_spooled_temporary_file] ( identifier[filepath] = keyword[None] , identifier[fileobj] = keyword[None] ):
literal[string]
identifier[spooled_file] = identifier[tempfile] . identifier[SpooledTemporaryFile] (
identifier[max_size] = identifier[settings] . identifier[TMP_FILE_MAX_SIZE] ,
identifier[dir] = identifier[settings] . identifier[TMP_DIR] )
keyword[if] identifier[filepath] :
identifier[fileobj] = identifier[open] ( identifier[filepath] , literal[string] )
keyword[if] identifier[fileobj] keyword[is] keyword[not] keyword[None] :
identifier[fileobj] . identifier[seek] ( literal[int] )
identifier[copyfileobj] ( identifier[fileobj] , identifier[spooled_file] , identifier[settings] . identifier[TMP_FILE_READ_SIZE] )
keyword[return] identifier[spooled_file] | def create_spooled_temporary_file(filepath=None, fileobj=None):
"""
Create a spooled temporary file. if ``filepath`` or ``fileobj`` is
defined its content will be copied into temporary file.
:param filepath: Path of input file
:type filepath: str
:param fileobj: Input file object
:type fileobj: file
:returns: Spooled temporary file
:rtype: :class:`tempfile.SpooledTemporaryFile`
"""
spooled_file = tempfile.SpooledTemporaryFile(max_size=settings.TMP_FILE_MAX_SIZE, dir=settings.TMP_DIR)
if filepath:
fileobj = open(filepath, 'r+b') # depends on [control=['if'], data=[]]
if fileobj is not None:
fileobj.seek(0)
copyfileobj(fileobj, spooled_file, settings.TMP_FILE_READ_SIZE) # depends on [control=['if'], data=['fileobj']]
return spooled_file |
def _zforce(self, R, z, phi=0., t=0.):
"""
NAME:
_zforce
PURPOSE:
evaluate the vertical force at (R,z, phi)
INPUT:
R - Cylindrical Galactocentric radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
vertical force at (R,z, phi)
HISTORY:
2016-06-06 - Written - Aladdin
"""
if not self.isNonAxi and phi is None:
phi= 0.
r, theta, phi = bovy_coords.cyl_to_spher(R,z,phi)
#x = z
dr_dz = nu.divide(z,r); dtheta_dz = nu.divide(-R,r**2); dphi_dz = 0
return self._computeforceArray(dr_dz, dtheta_dz, dphi_dz, R,z,phi) | def function[_zforce, parameter[self, R, z, phi, t]]:
constant[
NAME:
_zforce
PURPOSE:
evaluate the vertical force at (R,z, phi)
INPUT:
R - Cylindrical Galactocentric radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
vertical force at (R,z, phi)
HISTORY:
2016-06-06 - Written - Aladdin
]
if <ast.BoolOp object at 0x7da20c795ab0> begin[:]
variable[phi] assign[=] constant[0.0]
<ast.Tuple object at 0x7da20c795420> assign[=] call[name[bovy_coords].cyl_to_spher, parameter[name[R], name[z], name[phi]]]
variable[dr_dz] assign[=] call[name[nu].divide, parameter[name[z], name[r]]]
variable[dtheta_dz] assign[=] call[name[nu].divide, parameter[<ast.UnaryOp object at 0x7da18dc9a0b0>, binary_operation[name[r] ** constant[2]]]]
variable[dphi_dz] assign[=] constant[0]
return[call[name[self]._computeforceArray, parameter[name[dr_dz], name[dtheta_dz], name[dphi_dz], name[R], name[z], name[phi]]]] | keyword[def] identifier[_zforce] ( identifier[self] , identifier[R] , identifier[z] , identifier[phi] = literal[int] , identifier[t] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[isNonAxi] keyword[and] identifier[phi] keyword[is] keyword[None] :
identifier[phi] = literal[int]
identifier[r] , identifier[theta] , identifier[phi] = identifier[bovy_coords] . identifier[cyl_to_spher] ( identifier[R] , identifier[z] , identifier[phi] )
identifier[dr_dz] = identifier[nu] . identifier[divide] ( identifier[z] , identifier[r] ); identifier[dtheta_dz] = identifier[nu] . identifier[divide] (- identifier[R] , identifier[r] ** literal[int] ); identifier[dphi_dz] = literal[int]
keyword[return] identifier[self] . identifier[_computeforceArray] ( identifier[dr_dz] , identifier[dtheta_dz] , identifier[dphi_dz] , identifier[R] , identifier[z] , identifier[phi] ) | def _zforce(self, R, z, phi=0.0, t=0.0):
"""
NAME:
_zforce
PURPOSE:
evaluate the vertical force at (R,z, phi)
INPUT:
R - Cylindrical Galactocentric radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
vertical force at (R,z, phi)
HISTORY:
2016-06-06 - Written - Aladdin
"""
if not self.isNonAxi and phi is None:
phi = 0.0 # depends on [control=['if'], data=[]]
(r, theta, phi) = bovy_coords.cyl_to_spher(R, z, phi)
#x = z
dr_dz = nu.divide(z, r)
dtheta_dz = nu.divide(-R, r ** 2)
dphi_dz = 0
return self._computeforceArray(dr_dz, dtheta_dz, dphi_dz, R, z, phi) |
def _remove_unused_wires(block, keep_inputs=True):
""" Removes all unconnected wires from a block"""
valid_wires = set()
for logic_net in block.logic:
valid_wires.update(logic_net.args, logic_net.dests)
wire_removal_set = block.wirevector_set.difference(valid_wires)
for removed_wire in wire_removal_set:
if isinstance(removed_wire, Input):
term = " optimized away"
if keep_inputs:
valid_wires.add(removed_wire)
term = " deemed useless by optimization"
print("Input Wire, " + removed_wire.name + " has been" + term)
if isinstance(removed_wire, Output):
PyrtlInternalError("Output wire, " + removed_wire.name + " not driven")
block.wirevector_set = valid_wires | def function[_remove_unused_wires, parameter[block, keep_inputs]]:
constant[ Removes all unconnected wires from a block]
variable[valid_wires] assign[=] call[name[set], parameter[]]
for taget[name[logic_net]] in starred[name[block].logic] begin[:]
call[name[valid_wires].update, parameter[name[logic_net].args, name[logic_net].dests]]
variable[wire_removal_set] assign[=] call[name[block].wirevector_set.difference, parameter[name[valid_wires]]]
for taget[name[removed_wire]] in starred[name[wire_removal_set]] begin[:]
if call[name[isinstance], parameter[name[removed_wire], name[Input]]] begin[:]
variable[term] assign[=] constant[ optimized away]
if name[keep_inputs] begin[:]
call[name[valid_wires].add, parameter[name[removed_wire]]]
variable[term] assign[=] constant[ deemed useless by optimization]
call[name[print], parameter[binary_operation[binary_operation[binary_operation[constant[Input Wire, ] + name[removed_wire].name] + constant[ has been]] + name[term]]]]
if call[name[isinstance], parameter[name[removed_wire], name[Output]]] begin[:]
call[name[PyrtlInternalError], parameter[binary_operation[binary_operation[constant[Output wire, ] + name[removed_wire].name] + constant[ not driven]]]]
name[block].wirevector_set assign[=] name[valid_wires] | keyword[def] identifier[_remove_unused_wires] ( identifier[block] , identifier[keep_inputs] = keyword[True] ):
literal[string]
identifier[valid_wires] = identifier[set] ()
keyword[for] identifier[logic_net] keyword[in] identifier[block] . identifier[logic] :
identifier[valid_wires] . identifier[update] ( identifier[logic_net] . identifier[args] , identifier[logic_net] . identifier[dests] )
identifier[wire_removal_set] = identifier[block] . identifier[wirevector_set] . identifier[difference] ( identifier[valid_wires] )
keyword[for] identifier[removed_wire] keyword[in] identifier[wire_removal_set] :
keyword[if] identifier[isinstance] ( identifier[removed_wire] , identifier[Input] ):
identifier[term] = literal[string]
keyword[if] identifier[keep_inputs] :
identifier[valid_wires] . identifier[add] ( identifier[removed_wire] )
identifier[term] = literal[string]
identifier[print] ( literal[string] + identifier[removed_wire] . identifier[name] + literal[string] + identifier[term] )
keyword[if] identifier[isinstance] ( identifier[removed_wire] , identifier[Output] ):
identifier[PyrtlInternalError] ( literal[string] + identifier[removed_wire] . identifier[name] + literal[string] )
identifier[block] . identifier[wirevector_set] = identifier[valid_wires] | def _remove_unused_wires(block, keep_inputs=True):
""" Removes all unconnected wires from a block"""
valid_wires = set()
for logic_net in block.logic:
valid_wires.update(logic_net.args, logic_net.dests) # depends on [control=['for'], data=['logic_net']]
wire_removal_set = block.wirevector_set.difference(valid_wires)
for removed_wire in wire_removal_set:
if isinstance(removed_wire, Input):
term = ' optimized away'
if keep_inputs:
valid_wires.add(removed_wire)
term = ' deemed useless by optimization' # depends on [control=['if'], data=[]]
print('Input Wire, ' + removed_wire.name + ' has been' + term) # depends on [control=['if'], data=[]]
if isinstance(removed_wire, Output):
PyrtlInternalError('Output wire, ' + removed_wire.name + ' not driven') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['removed_wire']]
block.wirevector_set = valid_wires |
def sites_at_edges( self ):
"""
Finds the six sites with the maximum and minimum coordinates along x, y, and z.
Args:
None
Returns:
(List(List)): In the order [ +x, -x, +y, -y, +z, -z ]
"""
min_x = min( [ s.r[0] for s in self.sites ] )
max_x = max( [ s.r[0] for s in self.sites ] )
min_y = min( [ s.r[1] for s in self.sites ] )
max_y = max( [ s.r[1] for s in self.sites ] )
min_z = min( [ s.r[2] for s in self.sites ] )
max_z = max( [ s.r[2] for s in self.sites ] )
x_max = [ s for s in self.sites if s.r[0] == min_x ]
x_min = [ s for s in self.sites if s.r[0] == max_x ]
y_max = [ s for s in self.sites if s.r[1] == min_y ]
y_min = [ s for s in self.sites if s.r[1] == max_y ]
z_max = [ s for s in self.sites if s.r[2] == min_z ]
z_min = [ s for s in self.sites if s.r[2] == max_z ]
return ( x_max, x_min, y_max, y_min, z_max, z_min ) | def function[sites_at_edges, parameter[self]]:
constant[
Finds the six sites with the maximum and minimum coordinates along x, y, and z.
Args:
None
Returns:
(List(List)): In the order [ +x, -x, +y, -y, +z, -z ]
]
variable[min_x] assign[=] call[name[min], parameter[<ast.ListComp object at 0x7da18bcc8c40>]]
variable[max_x] assign[=] call[name[max], parameter[<ast.ListComp object at 0x7da18bcca9b0>]]
variable[min_y] assign[=] call[name[min], parameter[<ast.ListComp object at 0x7da207f9bac0>]]
variable[max_y] assign[=] call[name[max], parameter[<ast.ListComp object at 0x7da207f9b4c0>]]
variable[min_z] assign[=] call[name[min], parameter[<ast.ListComp object at 0x7da207f9b850>]]
variable[max_z] assign[=] call[name[max], parameter[<ast.ListComp object at 0x7da207f988b0>]]
variable[x_max] assign[=] <ast.ListComp object at 0x7da207f9bd90>
variable[x_min] assign[=] <ast.ListComp object at 0x7da207f9a4d0>
variable[y_max] assign[=] <ast.ListComp object at 0x7da207f9a920>
variable[y_min] assign[=] <ast.ListComp object at 0x7da207f9a650>
variable[z_max] assign[=] <ast.ListComp object at 0x7da207f994e0>
variable[z_min] assign[=] <ast.ListComp object at 0x7da207f991e0>
return[tuple[[<ast.Name object at 0x7da1b27bb550>, <ast.Name object at 0x7da1b27b94e0>, <ast.Name object at 0x7da1b27bb100>, <ast.Name object at 0x7da1b27b9810>, <ast.Name object at 0x7da1b27bafb0>, <ast.Name object at 0x7da1b27bb0a0>]]] | keyword[def] identifier[sites_at_edges] ( identifier[self] ):
literal[string]
identifier[min_x] = identifier[min] ([ identifier[s] . identifier[r] [ literal[int] ] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[sites] ])
identifier[max_x] = identifier[max] ([ identifier[s] . identifier[r] [ literal[int] ] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[sites] ])
identifier[min_y] = identifier[min] ([ identifier[s] . identifier[r] [ literal[int] ] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[sites] ])
identifier[max_y] = identifier[max] ([ identifier[s] . identifier[r] [ literal[int] ] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[sites] ])
identifier[min_z] = identifier[min] ([ identifier[s] . identifier[r] [ literal[int] ] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[sites] ])
identifier[max_z] = identifier[max] ([ identifier[s] . identifier[r] [ literal[int] ] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[sites] ])
identifier[x_max] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[sites] keyword[if] identifier[s] . identifier[r] [ literal[int] ]== identifier[min_x] ]
identifier[x_min] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[sites] keyword[if] identifier[s] . identifier[r] [ literal[int] ]== identifier[max_x] ]
identifier[y_max] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[sites] keyword[if] identifier[s] . identifier[r] [ literal[int] ]== identifier[min_y] ]
identifier[y_min] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[sites] keyword[if] identifier[s] . identifier[r] [ literal[int] ]== identifier[max_y] ]
identifier[z_max] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[sites] keyword[if] identifier[s] . identifier[r] [ literal[int] ]== identifier[min_z] ]
identifier[z_min] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[sites] keyword[if] identifier[s] . identifier[r] [ literal[int] ]== identifier[max_z] ]
keyword[return] ( identifier[x_max] , identifier[x_min] , identifier[y_max] , identifier[y_min] , identifier[z_max] , identifier[z_min] ) | def sites_at_edges(self):
"""
Finds the six sites with the maximum and minimum coordinates along x, y, and z.
Args:
None
Returns:
(List(List)): In the order [ +x, -x, +y, -y, +z, -z ]
"""
min_x = min([s.r[0] for s in self.sites])
max_x = max([s.r[0] for s in self.sites])
min_y = min([s.r[1] for s in self.sites])
max_y = max([s.r[1] for s in self.sites])
min_z = min([s.r[2] for s in self.sites])
max_z = max([s.r[2] for s in self.sites])
x_max = [s for s in self.sites if s.r[0] == min_x]
x_min = [s for s in self.sites if s.r[0] == max_x]
y_max = [s for s in self.sites if s.r[1] == min_y]
y_min = [s for s in self.sites if s.r[1] == max_y]
z_max = [s for s in self.sites if s.r[2] == min_z]
z_min = [s for s in self.sites if s.r[2] == max_z]
return (x_max, x_min, y_max, y_min, z_max, z_min) |
def stop(self, timeout=None, callback=None):
"""Stop ioloop (if managed) and call callback in ioloop before close.
Parameters
----------
timeout : float or None
Seconds to wait for ioloop to have *started*.
Returns
-------
stopped : thread-safe Future
Resolves when the callback() is done
"""
if timeout:
self._running.wait(timeout)
stopped_future = Future()
@gen.coroutine
def _stop():
if callback:
try:
yield gen.maybe_future(callback())
except Exception:
self._logger.exception('Unhandled exception calling stop callback')
if self._ioloop_managed:
self._logger.info('Stopping ioloop {0!r}'.format(self._ioloop))
# Allow ioloop to run once before stopping so that callbacks
# scheduled by callback() above get a chance to run.
yield gen.moment
self._ioloop.stop()
self._running.clear()
try:
self._ioloop.add_callback(
lambda: gen.chain_future(_stop(), stopped_future))
except AttributeError:
# Probably we have been shut-down already
pass
return stopped_future | def function[stop, parameter[self, timeout, callback]]:
constant[Stop ioloop (if managed) and call callback in ioloop before close.
Parameters
----------
timeout : float or None
Seconds to wait for ioloop to have *started*.
Returns
-------
stopped : thread-safe Future
Resolves when the callback() is done
]
if name[timeout] begin[:]
call[name[self]._running.wait, parameter[name[timeout]]]
variable[stopped_future] assign[=] call[name[Future], parameter[]]
def function[_stop, parameter[]]:
if name[callback] begin[:]
<ast.Try object at 0x7da1b05c40d0>
if name[self]._ioloop_managed begin[:]
call[name[self]._logger.info, parameter[call[constant[Stopping ioloop {0!r}].format, parameter[name[self]._ioloop]]]]
<ast.Yield object at 0x7da18f722d40>
call[name[self]._ioloop.stop, parameter[]]
call[name[self]._running.clear, parameter[]]
<ast.Try object at 0x7da1b0569600>
return[name[stopped_future]] | keyword[def] identifier[stop] ( identifier[self] , identifier[timeout] = keyword[None] , identifier[callback] = keyword[None] ):
literal[string]
keyword[if] identifier[timeout] :
identifier[self] . identifier[_running] . identifier[wait] ( identifier[timeout] )
identifier[stopped_future] = identifier[Future] ()
@ identifier[gen] . identifier[coroutine]
keyword[def] identifier[_stop] ():
keyword[if] identifier[callback] :
keyword[try] :
keyword[yield] identifier[gen] . identifier[maybe_future] ( identifier[callback] ())
keyword[except] identifier[Exception] :
identifier[self] . identifier[_logger] . identifier[exception] ( literal[string] )
keyword[if] identifier[self] . identifier[_ioloop_managed] :
identifier[self] . identifier[_logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[self] . identifier[_ioloop] ))
keyword[yield] identifier[gen] . identifier[moment]
identifier[self] . identifier[_ioloop] . identifier[stop] ()
identifier[self] . identifier[_running] . identifier[clear] ()
keyword[try] :
identifier[self] . identifier[_ioloop] . identifier[add_callback] (
keyword[lambda] : identifier[gen] . identifier[chain_future] ( identifier[_stop] (), identifier[stopped_future] ))
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[return] identifier[stopped_future] | def stop(self, timeout=None, callback=None):
"""Stop ioloop (if managed) and call callback in ioloop before close.
Parameters
----------
timeout : float or None
Seconds to wait for ioloop to have *started*.
Returns
-------
stopped : thread-safe Future
Resolves when the callback() is done
"""
if timeout:
self._running.wait(timeout) # depends on [control=['if'], data=[]]
stopped_future = Future()
@gen.coroutine
def _stop():
if callback:
try:
yield gen.maybe_future(callback()) # depends on [control=['try'], data=[]]
except Exception:
self._logger.exception('Unhandled exception calling stop callback') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if self._ioloop_managed:
self._logger.info('Stopping ioloop {0!r}'.format(self._ioloop))
# Allow ioloop to run once before stopping so that callbacks
# scheduled by callback() above get a chance to run.
yield gen.moment
self._ioloop.stop() # depends on [control=['if'], data=[]]
self._running.clear()
try:
self._ioloop.add_callback(lambda : gen.chain_future(_stop(), stopped_future)) # depends on [control=['try'], data=[]]
except AttributeError:
# Probably we have been shut-down already
pass # depends on [control=['except'], data=[]]
return stopped_future |
def add_tag(self, tag):
""" add a tag to the tag list """
if tag not in self._tags:
self._tags[tag] = dict() | def function[add_tag, parameter[self, tag]]:
constant[ add a tag to the tag list ]
if compare[name[tag] <ast.NotIn object at 0x7da2590d7190> name[self]._tags] begin[:]
call[name[self]._tags][name[tag]] assign[=] call[name[dict], parameter[]] | keyword[def] identifier[add_tag] ( identifier[self] , identifier[tag] ):
literal[string]
keyword[if] identifier[tag] keyword[not] keyword[in] identifier[self] . identifier[_tags] :
identifier[self] . identifier[_tags] [ identifier[tag] ]= identifier[dict] () | def add_tag(self, tag):
""" add a tag to the tag list """
if tag not in self._tags:
self._tags[tag] = dict() # depends on [control=['if'], data=['tag']] |
async def _cancel(log, **tasks):
    """Cancel the given tasks gracefully, one at a time.

    Each truthy task is cancelled and then awaited so its cancellation
    completes before the next entry is processed.  Falsy entries (e.g.
    ``None``) are skipped.  Unexpected exceptions raised while awaiting
    a cancelled task are logged, never propagated.
    """
    for label, pending in tasks.items():
        if pending:
            pending.cancel()
            try:
                await pending
            except asyncio.CancelledError:
                # Expected outcome of a successful cancellation.
                pass
            except Exception:
                log.exception('Unhandled exception from %s after cancel', label)
literal[string]
keyword[for] identifier[name] , identifier[task] keyword[in] identifier[tasks] . identifier[items] ():
keyword[if] keyword[not] identifier[task] :
keyword[continue]
identifier[task] . identifier[cancel] ()
keyword[try] :
keyword[await] identifier[task]
keyword[except] identifier[asyncio] . identifier[CancelledError] :
keyword[pass]
keyword[except] identifier[Exception] :
identifier[log] . identifier[exception] ( literal[string] , identifier[name] ) | async def _cancel(log, **tasks):
"""
Helper to cancel one or more tasks gracefully, logging exceptions.
"""
for (name, task) in tasks.items():
if not task:
continue # depends on [control=['if'], data=[]]
task.cancel()
try:
await task # depends on [control=['try'], data=[]]
except asyncio.CancelledError:
pass # depends on [control=['except'], data=[]]
except Exception:
log.exception('Unhandled exception from %s after cancel', name) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] |
def simxGetDistanceHandle(clientID, distanceObjectName, operationMode):
    """Retrieve the handle of a distance object from the remote V-REP server.

    See the function description/documentation in the V-REP user manual
    for the remote-API semantics of each argument.

    Returns a ``(return_code, handle)`` tuple.
    """
    handle = ct.c_int()
    # The underlying C layer expects a byte string; encode only on Python 3
    # and only for exact str instances (mirrors the original check).
    needs_encoding = (sys.version_info[0] == 3) and (type(distanceObjectName) is str)
    if needs_encoding:
        distanceObjectName = distanceObjectName.encode('utf-8')
    ret = c_GetDistanceHandle(clientID, distanceObjectName, ct.byref(handle), operationMode)
    return ret, handle.value
constant[
Please have a look at the function description/documentation in the V-REP user manual
]
variable[handle] assign[=] call[name[ct].c_int, parameter[]]
if <ast.BoolOp object at 0x7da1b138df60> begin[:]
variable[distanceObjectName] assign[=] call[name[distanceObjectName].encode, parameter[constant[utf-8]]]
return[tuple[[<ast.Call object at 0x7da1b138c5b0>, <ast.Attribute object at 0x7da1b138dea0>]]] | keyword[def] identifier[simxGetDistanceHandle] ( identifier[clientID] , identifier[distanceObjectName] , identifier[operationMode] ):
literal[string]
identifier[handle] = identifier[ct] . identifier[c_int] ()
keyword[if] ( identifier[sys] . identifier[version_info] [ literal[int] ]== literal[int] ) keyword[and] ( identifier[type] ( identifier[distanceObjectName] ) keyword[is] identifier[str] ):
identifier[distanceObjectName] = identifier[distanceObjectName] . identifier[encode] ( literal[string] )
keyword[return] identifier[c_GetDistanceHandle] ( identifier[clientID] , identifier[distanceObjectName] , identifier[ct] . identifier[byref] ( identifier[handle] ), identifier[operationMode] ), identifier[handle] . identifier[value] | def simxGetDistanceHandle(clientID, distanceObjectName, operationMode):
"""
Please have a look at the function description/documentation in the V-REP user manual
"""
handle = ct.c_int()
if sys.version_info[0] == 3 and type(distanceObjectName) is str:
distanceObjectName = distanceObjectName.encode('utf-8') # depends on [control=['if'], data=[]]
return (c_GetDistanceHandle(clientID, distanceObjectName, ct.byref(handle), operationMode), handle.value) |
def trim_floating_solid(im):
    r"""
    Removes all solid that is not attached to the edges of the image.
    Parameters
    ----------
    im : ND-array
        The image of the porous material
    Returns
    -------
    image : ND-array
        A version of ``im`` but with all the disconnected solid removed.
    See Also
    --------
    find_disconnected_voxels
    """
    # Work on a copy so the caller's image is not modified.
    result = sp.copy(im)
    # Solid voxels are the inverted phase; disconnected ones become pores.
    floating = find_disconnected_voxels(~result)
    result[floating] = True
    return result
constant[
Removes all solid that that is not attached to the edges of the image.
Parameters
----------
im : ND-array
The image of the porous material
Returns
-------
image : ND-array
A version of ``im`` but with all the disconnected solid removed.
See Also
--------
find_disconnected_voxels
]
variable[im] assign[=] call[name[sp].copy, parameter[name[im]]]
variable[holes] assign[=] call[name[find_disconnected_voxels], parameter[<ast.UnaryOp object at 0x7da1b07acca0>]]
call[name[im]][name[holes]] assign[=] constant[True]
return[name[im]] | keyword[def] identifier[trim_floating_solid] ( identifier[im] ):
literal[string]
identifier[im] = identifier[sp] . identifier[copy] ( identifier[im] )
identifier[holes] = identifier[find_disconnected_voxels] (~ identifier[im] )
identifier[im] [ identifier[holes] ]= keyword[True]
keyword[return] identifier[im] | def trim_floating_solid(im):
"""
Removes all solid that that is not attached to the edges of the image.
Parameters
----------
im : ND-array
The image of the porous material
Returns
-------
image : ND-array
A version of ``im`` but with all the disconnected solid removed.
See Also
--------
find_disconnected_voxels
"""
im = sp.copy(im)
holes = find_disconnected_voxels(~im)
im[holes] = True
return im |
def get_library(path=None, root=None, db=None):
    """Return the default library for this installation.

    :param path: optional config path, forwarded to :func:`config`
    :param root: optional library root, forwarded to :func:`config`
    :param db: optional database spec, forwarded to :func:`config`
    :return: a new library built from the resolved run configuration
    """
    # The original code placed the docstring *after* this import, turning it
    # into a dead bare-string statement; it is now a real docstring.
    # Import kept local -- presumably to avoid a circular import at module
    # load time; TODO confirm.
    import ambry.library as _l
    rc = config(path=path, root=root, db=db)
    return _l.new_library(rc)
import module[ambry.library] as alias[_l]
constant[Return the default library for this installation.]
variable[rc] assign[=] call[name[config], parameter[]]
return[call[name[_l].new_library, parameter[name[rc]]]] | keyword[def] identifier[get_library] ( identifier[path] = keyword[None] , identifier[root] = keyword[None] , identifier[db] = keyword[None] ):
keyword[import] identifier[ambry] . identifier[library] keyword[as] identifier[_l]
literal[string]
identifier[rc] = identifier[config] ( identifier[path] = identifier[path] , identifier[root] = identifier[root] , identifier[db] = identifier[db] )
keyword[return] identifier[_l] . identifier[new_library] ( identifier[rc] ) | def get_library(path=None, root=None, db=None):
import ambry.library as _l
'Return the default library for this installation.'
rc = config(path=path, root=root, db=db)
return _l.new_library(rc) |
def percentile(a, q):
    """
    Compute the qth percentile of the data along the specified axis.
    Simpler version than the numpy version that always flattens input arrays.

    The percentile argument ``q`` is never mutated: passing a list of
    percentiles leaves the caller's list untouched.

    Examples
    --------
    >>> a = [[10, 7, 4], [3, 2, 1]]
    >>> percentile(a, 20)
    2.0
    >>> percentile(a, 50)
    3.5
    >>> percentile(a, [20, 80])
    [2.0, 7.0]
    >>> a = list(range(40))
    >>> percentile(a, 25)
    9.75

    :param a: Input array or object that can be converted to an array.
    :param q: Percentile to compute, which must be between 0 and 100 inclusive.
    :return: the qth percentile(s) of the array elements, or None for
        empty/falsy input.
    """
    # NOTE: a falsy scalar (a == 0) also returns None here -- preserved for
    # backward compatibility with the previous implementation.
    if not a:
        return None
    if isinstance(q, (float, int)):
        raw = [q]
    elif isinstance(q, (tuple, list)):
        # Copy: the old code aliased the caller's list and divided its
        # elements by 100 in place, mutating the caller's data.
        raw = list(q)
    else:
        raise ValueError("Quantile type {} not understood".format(type(q)))
    if isinstance(a, (float, int)):
        a = [a]
    quantiles = []
    for value in raw:
        if value < 0. or value > 100.:
            raise ValueError("Percentiles must be in the range [0,100]")
        quantiles.append(value / 100.)
    data = sorted(flatten(a))
    result = []
    for frac in quantiles:
        # Linear interpolation between the two closest ranks.
        k = (len(data) - 1) * frac
        f = math.floor(k)
        c = math.ceil(k)
        if f == c:
            result.append(float(data[int(k)]))
            continue
        d0 = data[int(f)] * (c - k)
        d1 = data[int(c)] * (k - f)
        result.append(float(d0 + d1))
    if len(result) == 1:
        return result[0]
    return result
constant[
Compute the qth percentile of the data along the specified axis.
Simpler version than the numpy version that always flattens input arrays.
Examples
--------
>>> a = [[10, 7, 4], [3, 2, 1]]
>>> percentile(a, 20)
2.0
>>> percentile(a, 50)
3.5
>>> percentile(a, [20, 80])
[2.0, 7.0]
>>> a = list(range(40))
>>> percentile(a, 25)
9.75
:param a: Input array or object that can be converted to an array.
:param q: Percentile to compute, which must be between 0 and 100 inclusive.
:return: the qth percentile(s) of the array elements.
]
if <ast.UnaryOp object at 0x7da1b24602e0> begin[:]
return[constant[None]]
if call[name[isinstance], parameter[name[q], tuple[[<ast.Name object at 0x7da1b24635e0>, <ast.Name object at 0x7da1b2462650>]]]] begin[:]
variable[qq] assign[=] list[[<ast.Name object at 0x7da1b2462c80>]]
if call[name[isinstance], parameter[name[a], tuple[[<ast.Name object at 0x7da1b2461e10>, <ast.Name object at 0x7da1b2461840>]]]] begin[:]
variable[a] assign[=] list[[<ast.Name object at 0x7da1b2462530>]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[qq]]]]]] begin[:]
if <ast.BoolOp object at 0x7da1b2462680> begin[:]
<ast.Raise object at 0x7da1b24621a0>
<ast.AugAssign object at 0x7da1b2461210>
variable[a] assign[=] call[name[sorted], parameter[call[name[flatten], parameter[name[a]]]]]
variable[r] assign[=] list[[]]
for taget[name[q]] in starred[name[qq]] begin[:]
variable[k] assign[=] binary_operation[binary_operation[call[name[len], parameter[name[a]]] - constant[1]] * name[q]]
variable[f] assign[=] call[name[math].floor, parameter[name[k]]]
variable[c] assign[=] call[name[math].ceil, parameter[name[k]]]
if compare[name[f] equal[==] name[c]] begin[:]
call[name[r].append, parameter[call[name[float], parameter[call[name[a]][call[name[int], parameter[name[k]]]]]]]]
continue
variable[d0] assign[=] binary_operation[call[name[a]][call[name[int], parameter[name[f]]]] * binary_operation[name[c] - name[k]]]
variable[d1] assign[=] binary_operation[call[name[a]][call[name[int], parameter[name[c]]]] * binary_operation[name[k] - name[f]]]
call[name[r].append, parameter[call[name[float], parameter[binary_operation[name[d0] + name[d1]]]]]]
if compare[call[name[len], parameter[name[r]]] equal[==] constant[1]] begin[:]
return[call[name[r]][constant[0]]]
return[name[r]] | keyword[def] identifier[percentile] ( identifier[a] , identifier[q] ):
literal[string]
keyword[if] keyword[not] identifier[a] :
keyword[return] keyword[None]
keyword[if] identifier[isinstance] ( identifier[q] ,( identifier[float] , identifier[int] )):
identifier[qq] =[ identifier[q] ]
keyword[elif] identifier[isinstance] ( identifier[q] ,( identifier[tuple] , identifier[list] )):
identifier[qq] = identifier[q]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[q] )))
keyword[if] identifier[isinstance] ( identifier[a] ,( identifier[float] , identifier[int] )):
identifier[a] =[ identifier[a] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[qq] )):
keyword[if] identifier[qq] [ identifier[i] ]< literal[int] keyword[or] identifier[qq] [ identifier[i] ]> literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[qq] [ identifier[i] ]/= literal[int]
identifier[a] = identifier[sorted] ( identifier[flatten] ( identifier[a] ))
identifier[r] =[]
keyword[for] identifier[q] keyword[in] identifier[qq] :
identifier[k] =( identifier[len] ( identifier[a] )- literal[int] )* identifier[q]
identifier[f] = identifier[math] . identifier[floor] ( identifier[k] )
identifier[c] = identifier[math] . identifier[ceil] ( identifier[k] )
keyword[if] identifier[f] == identifier[c] :
identifier[r] . identifier[append] ( identifier[float] ( identifier[a] [ identifier[int] ( identifier[k] )]))
keyword[continue]
identifier[d0] = identifier[a] [ identifier[int] ( identifier[f] )]*( identifier[c] - identifier[k] )
identifier[d1] = identifier[a] [ identifier[int] ( identifier[c] )]*( identifier[k] - identifier[f] )
identifier[r] . identifier[append] ( identifier[float] ( identifier[d0] + identifier[d1] ))
keyword[if] identifier[len] ( identifier[r] )== literal[int] :
keyword[return] identifier[r] [ literal[int] ]
keyword[return] identifier[r] | def percentile(a, q):
"""
Compute the qth percentile of the data along the specified axis.
Simpler version than the numpy version that always flattens input arrays.
Examples
--------
>>> a = [[10, 7, 4], [3, 2, 1]]
>>> percentile(a, 20)
2.0
>>> percentile(a, 50)
3.5
>>> percentile(a, [20, 80])
[2.0, 7.0]
>>> a = list(range(40))
>>> percentile(a, 25)
9.75
:param a: Input array or object that can be converted to an array.
:param q: Percentile to compute, which must be between 0 and 100 inclusive.
:return: the qth percentile(s) of the array elements.
"""
if not a:
return None # depends on [control=['if'], data=[]]
if isinstance(q, (float, int)):
qq = [q] # depends on [control=['if'], data=[]]
elif isinstance(q, (tuple, list)):
qq = q # depends on [control=['if'], data=[]]
else:
raise ValueError('Quantile type {} not understood'.format(type(q)))
if isinstance(a, (float, int)):
a = [a] # depends on [control=['if'], data=[]]
for i in range(len(qq)):
if qq[i] < 0.0 or qq[i] > 100.0:
raise ValueError('Percentiles must be in the range [0,100]') # depends on [control=['if'], data=[]]
qq[i] /= 100.0 # depends on [control=['for'], data=['i']]
a = sorted(flatten(a))
r = []
for q in qq:
k = (len(a) - 1) * q
f = math.floor(k)
c = math.ceil(k)
if f == c:
r.append(float(a[int(k)]))
continue # depends on [control=['if'], data=[]]
d0 = a[int(f)] * (c - k)
d1 = a[int(c)] * (k - f)
r.append(float(d0 + d1)) # depends on [control=['for'], data=['q']]
if len(r) == 1:
return r[0] # depends on [control=['if'], data=[]]
return r |
def sign_with_privkey(
    digest: bytes,
    privkey: Ed25519PrivateKey,
    global_pubkey: Ed25519PublicPoint,
    nonce: int,
    global_commit: Ed25519PublicPoint,
) -> Ed25519Signature:
    """Create a CoSi signature of ``digest`` with the supplied private key.

    Needs the aggregate (global) public key and the aggregate commitment
    of the signing round in addition to the signer's own secret material.
    """
    # Derive the secret scalar from the private key seed.
    secret_scalar = _ed25519.decodecoord(_ed25519.H(privkey))
    # Fiat-Shamir style challenge over commitment, public key and message.
    challenge = _ed25519.Hint(global_commit + global_pubkey + digest)
    s = (nonce + challenge * secret_scalar) % _ed25519.l
    return Ed25519Signature(_ed25519.encodeint(s))
constant[Create a CoSi signature of `digest` with the supplied private key.
This function needs to know the global public key and global commitment.
]
variable[h] assign[=] call[name[_ed25519].H, parameter[name[privkey]]]
variable[a] assign[=] call[name[_ed25519].decodecoord, parameter[name[h]]]
variable[S] assign[=] binary_operation[binary_operation[name[nonce] + binary_operation[call[name[_ed25519].Hint, parameter[binary_operation[binary_operation[name[global_commit] + name[global_pubkey]] + name[digest]]]] * name[a]]] <ast.Mod object at 0x7da2590d6920> name[_ed25519].l]
return[call[name[Ed25519Signature], parameter[call[name[_ed25519].encodeint, parameter[name[S]]]]]] | keyword[def] identifier[sign_with_privkey] (
identifier[digest] : identifier[bytes] ,
identifier[privkey] : identifier[Ed25519PrivateKey] ,
identifier[global_pubkey] : identifier[Ed25519PublicPoint] ,
identifier[nonce] : identifier[int] ,
identifier[global_commit] : identifier[Ed25519PublicPoint] ,
)-> identifier[Ed25519Signature] :
literal[string]
identifier[h] = identifier[_ed25519] . identifier[H] ( identifier[privkey] )
identifier[a] = identifier[_ed25519] . identifier[decodecoord] ( identifier[h] )
identifier[S] =( identifier[nonce] + identifier[_ed25519] . identifier[Hint] ( identifier[global_commit] + identifier[global_pubkey] + identifier[digest] )* identifier[a] )% identifier[_ed25519] . identifier[l]
keyword[return] identifier[Ed25519Signature] ( identifier[_ed25519] . identifier[encodeint] ( identifier[S] )) | def sign_with_privkey(digest: bytes, privkey: Ed25519PrivateKey, global_pubkey: Ed25519PublicPoint, nonce: int, global_commit: Ed25519PublicPoint) -> Ed25519Signature:
"""Create a CoSi signature of `digest` with the supplied private key.
This function needs to know the global public key and global commitment.
"""
h = _ed25519.H(privkey)
a = _ed25519.decodecoord(h)
S = (nonce + _ed25519.Hint(global_commit + global_pubkey + digest) * a) % _ed25519.l
return Ed25519Signature(_ed25519.encodeint(S)) |
def is_done(self, submissionid_or_submission, user_check=True):
    """ Tells if a submission is done and its result is available """
    # TODO: not a very nice way to avoid too many database call. Should be refactored.
    submission = (submissionid_or_submission
                  if isinstance(submissionid_or_submission, dict)
                  else self.get_submission(submissionid_or_submission, False))
    if user_check and not self.user_is_submission_owner(submission):
        return None
    return submission["status"] in ("done", "error")
constant[ Tells if a submission is done and its result is available ]
if call[name[isinstance], parameter[name[submissionid_or_submission], name[dict]]] begin[:]
variable[submission] assign[=] name[submissionid_or_submission]
if <ast.BoolOp object at 0x7da18bc70af0> begin[:]
return[constant[None]]
return[<ast.BoolOp object at 0x7da18bc71030>] | keyword[def] identifier[is_done] ( identifier[self] , identifier[submissionid_or_submission] , identifier[user_check] = keyword[True] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[submissionid_or_submission] , identifier[dict] ):
identifier[submission] = identifier[submissionid_or_submission]
keyword[else] :
identifier[submission] = identifier[self] . identifier[get_submission] ( identifier[submissionid_or_submission] , keyword[False] )
keyword[if] identifier[user_check] keyword[and] keyword[not] identifier[self] . identifier[user_is_submission_owner] ( identifier[submission] ):
keyword[return] keyword[None]
keyword[return] identifier[submission] [ literal[string] ]== literal[string] keyword[or] identifier[submission] [ literal[string] ]== literal[string] | def is_done(self, submissionid_or_submission, user_check=True):
""" Tells if a submission is done and its result is available """
# TODO: not a very nice way to avoid too many database call. Should be refactored.
if isinstance(submissionid_or_submission, dict):
submission = submissionid_or_submission # depends on [control=['if'], data=[]]
else:
submission = self.get_submission(submissionid_or_submission, False)
if user_check and (not self.user_is_submission_owner(submission)):
return None # depends on [control=['if'], data=[]]
return submission['status'] == 'done' or submission['status'] == 'error' |
def _M2_const(Xvar, mask_X, xvarsum, xconst, Yvar, mask_Y, yvarsum, yconst, weights=None):
    r""" Computes the unnormalized covariance matrix between X and Y, exploiting constant input columns
    Computes the unnormalized covariance matrix :math:`C = X^\top Y`
    (for symmetric=False) or :math:`C = \frac{1}{2} (X^\top Y + Y^\top X)`
    (for symmetric=True). Suppose the data matrices can be column-permuted
    to have the form

    .. math:
        X &=& (X_{\mathrm{var}}, X_{\mathrm{const}})
        Y &=& (Y_{\mathrm{var}}, Y_{\mathrm{const}})

    with rows:

    .. math:
        x_t &=& (x_{\mathrm{var},t}, x_{\mathrm{const}})
        y_t &=& (y_{\mathrm{var},t}, y_{\mathrm{const}})

    where :math:`x_{\mathrm{const}},\:y_{\mathrm{const}}` are constant vectors.
    The resulting matrix has the general form:

    .. math:
        C &=& [X_{\mathrm{var}}^\top Y_{\mathrm{var}}  x_{sum} y_{\mathrm{const}}^\top ]
          & & [x_{\mathrm{const}}^\top y_{sum}^\top    x_{sum} x_{sum}^\top ]

    where :math:`x_{sum} = \sum_t x_{\mathrm{var},t}` and
    :math:`y_{sum} = \sum_t y_{\mathrm{var},t}`.

    Parameters
    ----------
    Xvar : ndarray (T, m)
        Part of the data matrix X with :math:`m \le M` variable columns.
    mask_X : ndarray (M)
        Boolean array of size M of the full columns. False for constant column,
        True for variable column in X.
    xvarsum : ndarray (m)
        Column sum of variable part of data matrix X
    xconst : ndarray (M-m)
        Values of the constant part of data matrix X
    Yvar : ndarray (T, n)
        Part of the data matrix Y with :math:`n \le N` variable columns.
    mask_Y : ndarray (N)
        Boolean array of size N of the full columns. False for constant column,
        True for variable column in Y.
    yvarsum : ndarray (n)
        Column sum of variable part of data matrix Y
    yconst : ndarray (N-n)
        Values of the constant part of data matrix Y
    weights : None or ndarray (N)
        weights for all time steps.

    Returns
    -------
    C : ndarray (M, N)
        Unnormalized covariance matrix.
    """
    # Full (M, N) result; constant-column blocks are filled in below.
    C = np.zeros((len(mask_X), len(mask_Y)))
    # Block 11: dense covariance of the variable columns only.
    C[np.ix_(mask_X, mask_Y)] = _M2_dense(Xvar, Yvar, weights=weights)
    # other blocks
    xsum_is_0 = _is_zero(xvarsum)
    ysum_is_0 = _is_zero(yvarsum)
    xconst_is_0 = _is_zero(xconst)
    yconst_is_0 = _is_zero(yconst)
    # NOTE(review): the zero flags above are computed from the sums passed in
    # by the caller, while the weighted branch below recomputes xvarsum and
    # yvarsum from scratch. Presumably callers pass sums consistent with the
    # weights -- TODO confirm.
    # TODO: maybe we don't need the checking here, if we do the decision in the higher-level function M2
    # TODO: if not zero, we could still exploit the zeros in const and compute (and write!) this outer product
    # TODO: only to a sub-matrix
    # Block 12 and 21
    if weights is not None:
        wsum = np.sum(weights)
        # Weighted column sums of the variable parts replace the plain sums.
        xvarsum = np.sum(weights[:, None] * Xvar, axis=0)
        yvarsum = np.sum(weights[:, None] * Yvar, axis=0)
    else:
        wsum = Xvar.shape[0]
    if not (xsum_is_0 or yconst_is_0) or not (ysum_is_0 or xconst_is_0):
        # Off-diagonal blocks: outer products of variable sums and constants.
        C[np.ix_(mask_X, ~mask_Y)] = np.outer(xvarsum, yconst)
        C[np.ix_(~mask_X, mask_Y)] = np.outer(xconst, yvarsum)
    # Block 22
    if not (xconst_is_0 or yconst_is_0):
        C[np.ix_(~mask_X, ~mask_Y)] = np.outer(wsum*xconst, yconst)
    return C
constant[ Computes the unnormalized covariance matrix between X and Y, exploiting constant input columns
Computes the unnormalized covariance matrix :math:`C = X^\top Y`
(for symmetric=False) or :math:`C = \frac{1}{2} (X^\top Y + Y^\top X)`
(for symmetric=True). Suppose the data matrices can be column-permuted
to have the form
.. math:
X &=& (X_{\mathrm{var}}, X_{\mathrm{const}})
Y &=& (Y_{\mathrm{var}}, Y_{\mathrm{const}})
with rows:
.. math:
x_t &=& (x_{\mathrm{var},t}, x_{\mathrm{const}})
y_t &=& (y_{\mathrm{var},t}, y_{\mathrm{const}})
where :math:`x_{\mathrm{const}},\:y_{\mathrm{const}}` are constant vectors.
The resulting matrix has the general form:
.. math:
C &=& [X_{\mathrm{var}}^\top Y_{\mathrm{var}} x_{sum} y_{\mathrm{const}}^\top ]
& & [x_{\mathrm{const}}^\top y_{sum}^\top x_{sum} x_{sum}^\top ]
where :math:`x_{sum} = \sum_t x_{\mathrm{var},t}` and
:math:`y_{sum} = \sum_t y_{\mathrm{var},t}`.
Parameters
----------
Xvar : ndarray (T, m)
Part of the data matrix X with :math:`m \le M` variable columns.
mask_X : ndarray (M)
Boolean array of size M of the full columns. False for constant column,
True for variable column in X.
xvarsum : ndarray (m)
Column sum of variable part of data matrix X
xconst : ndarray (M-m)
Values of the constant part of data matrix X
Yvar : ndarray (T, n)
Part of the data matrix Y with :math:`n \le N` variable columns.
mask_Y : ndarray (N)
Boolean array of size N of the full columns. False for constant column,
True for variable column in Y.
yvarsum : ndarray (n)
Column sum of variable part of data matrix Y
yconst : ndarray (N-n)
Values of the constant part of data matrix Y
weights : None or ndarray (N)
weights for all time steps.
Returns
-------
C : ndarray (M, N)
Unnormalized covariance matrix.
]
variable[C] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Call object at 0x7da20c6e51b0>, <ast.Call object at 0x7da20c6e5270>]]]]
call[name[C]][call[name[np].ix_, parameter[name[mask_X], name[mask_Y]]]] assign[=] call[name[_M2_dense], parameter[name[Xvar], name[Yvar]]]
variable[xsum_is_0] assign[=] call[name[_is_zero], parameter[name[xvarsum]]]
variable[ysum_is_0] assign[=] call[name[_is_zero], parameter[name[yvarsum]]]
variable[xconst_is_0] assign[=] call[name[_is_zero], parameter[name[xconst]]]
variable[yconst_is_0] assign[=] call[name[_is_zero], parameter[name[yconst]]]
if compare[name[weights] is_not constant[None]] begin[:]
variable[wsum] assign[=] call[name[np].sum, parameter[name[weights]]]
variable[xvarsum] assign[=] call[name[np].sum, parameter[binary_operation[call[name[weights]][tuple[[<ast.Slice object at 0x7da18bcc8370>, <ast.Constant object at 0x7da18bcc9270>]]] * name[Xvar]]]]
variable[yvarsum] assign[=] call[name[np].sum, parameter[binary_operation[call[name[weights]][tuple[[<ast.Slice object at 0x7da18bcc8700>, <ast.Constant object at 0x7da18bccae00>]]] * name[Yvar]]]]
if <ast.BoolOp object at 0x7da18bcc8280> begin[:]
call[name[C]][call[name[np].ix_, parameter[name[mask_X], <ast.UnaryOp object at 0x7da18bccb520>]]] assign[=] call[name[np].outer, parameter[name[xvarsum], name[yconst]]]
call[name[C]][call[name[np].ix_, parameter[<ast.UnaryOp object at 0x7da18bcc9210>, name[mask_Y]]]] assign[=] call[name[np].outer, parameter[name[xconst], name[yvarsum]]]
if <ast.UnaryOp object at 0x7da18bcca170> begin[:]
call[name[C]][call[name[np].ix_, parameter[<ast.UnaryOp object at 0x7da18bcc8f40>, <ast.UnaryOp object at 0x7da18bccae60>]]] assign[=] call[name[np].outer, parameter[binary_operation[name[wsum] * name[xconst]], name[yconst]]]
return[name[C]] | keyword[def] identifier[_M2_const] ( identifier[Xvar] , identifier[mask_X] , identifier[xvarsum] , identifier[xconst] , identifier[Yvar] , identifier[mask_Y] , identifier[yvarsum] , identifier[yconst] , identifier[weights] = keyword[None] ):
literal[string]
identifier[C] = identifier[np] . identifier[zeros] (( identifier[len] ( identifier[mask_X] ), identifier[len] ( identifier[mask_Y] )))
identifier[C] [ identifier[np] . identifier[ix_] ( identifier[mask_X] , identifier[mask_Y] )]= identifier[_M2_dense] ( identifier[Xvar] , identifier[Yvar] , identifier[weights] = identifier[weights] )
identifier[xsum_is_0] = identifier[_is_zero] ( identifier[xvarsum] )
identifier[ysum_is_0] = identifier[_is_zero] ( identifier[yvarsum] )
identifier[xconst_is_0] = identifier[_is_zero] ( identifier[xconst] )
identifier[yconst_is_0] = identifier[_is_zero] ( identifier[yconst] )
keyword[if] identifier[weights] keyword[is] keyword[not] keyword[None] :
identifier[wsum] = identifier[np] . identifier[sum] ( identifier[weights] )
identifier[xvarsum] = identifier[np] . identifier[sum] ( identifier[weights] [:, keyword[None] ]* identifier[Xvar] , identifier[axis] = literal[int] )
identifier[yvarsum] = identifier[np] . identifier[sum] ( identifier[weights] [:, keyword[None] ]* identifier[Yvar] , identifier[axis] = literal[int] )
keyword[else] :
identifier[wsum] = identifier[Xvar] . identifier[shape] [ literal[int] ]
keyword[if] keyword[not] ( identifier[xsum_is_0] keyword[or] identifier[yconst_is_0] ) keyword[or] keyword[not] ( identifier[ysum_is_0] keyword[or] identifier[xconst_is_0] ):
identifier[C] [ identifier[np] . identifier[ix_] ( identifier[mask_X] ,~ identifier[mask_Y] )]= identifier[np] . identifier[outer] ( identifier[xvarsum] , identifier[yconst] )
identifier[C] [ identifier[np] . identifier[ix_] (~ identifier[mask_X] , identifier[mask_Y] )]= identifier[np] . identifier[outer] ( identifier[xconst] , identifier[yvarsum] )
keyword[if] keyword[not] ( identifier[xconst_is_0] keyword[or] identifier[yconst_is_0] ):
identifier[C] [ identifier[np] . identifier[ix_] (~ identifier[mask_X] ,~ identifier[mask_Y] )]= identifier[np] . identifier[outer] ( identifier[wsum] * identifier[xconst] , identifier[yconst] )
keyword[return] identifier[C] | def _M2_const(Xvar, mask_X, xvarsum, xconst, Yvar, mask_Y, yvarsum, yconst, weights=None):
""" Computes the unnormalized covariance matrix between X and Y, exploiting constant input columns
Computes the unnormalized covariance matrix :math:`C = X^\\top Y`
(for symmetric=False) or :math:`C = \\frac{1}{2} (X^\\top Y + Y^\\top X)`
(for symmetric=True). Suppose the data matrices can be column-permuted
to have the form
.. math:
X &=& (X_{\\mathrm{var}}, X_{\\mathrm{const}})
Y &=& (Y_{\\mathrm{var}}, Y_{\\mathrm{const}})
with rows:
.. math:
x_t &=& (x_{\\mathrm{var},t}, x_{\\mathrm{const}})
y_t &=& (y_{\\mathrm{var},t}, y_{\\mathrm{const}})
where :math:`x_{\\mathrm{const}},\\:y_{\\mathrm{const}}` are constant vectors.
The resulting matrix has the general form:
.. math:
C &=& [X_{\\mathrm{var}}^\\top Y_{\\mathrm{var}} x_{sum} y_{\\mathrm{const}}^\\top ]
& & [x_{\\mathrm{const}}^\\top y_{sum}^\\top x_{sum} x_{sum}^\\top ]
where :math:`x_{sum} = \\sum_t x_{\\mathrm{var},t}` and
:math:`y_{sum} = \\sum_t y_{\\mathrm{var},t}`.
Parameters
----------
Xvar : ndarray (T, m)
Part of the data matrix X with :math:`m \\le M` variable columns.
mask_X : ndarray (M)
Boolean array of size M of the full columns. False for constant column,
True for variable column in X.
xvarsum : ndarray (m)
Column sum of variable part of data matrix X
xconst : ndarray (M-m)
Values of the constant part of data matrix X
Yvar : ndarray (T, n)
Part of the data matrix Y with :math:`n \\le N` variable columns.
mask_Y : ndarray (N)
Boolean array of size N of the full columns. False for constant column,
True for variable column in Y.
yvarsum : ndarray (n)
Column sum of variable part of data matrix Y
yconst : ndarray (N-n)
Values of the constant part of data matrix Y
weights : None or ndarray (N)
weights for all time steps.
Returns
-------
C : ndarray (M, N)
Unnormalized covariance matrix.
"""
C = np.zeros((len(mask_X), len(mask_Y)))
# Block 11
C[np.ix_(mask_X, mask_Y)] = _M2_dense(Xvar, Yvar, weights=weights)
# other blocks
xsum_is_0 = _is_zero(xvarsum)
ysum_is_0 = _is_zero(yvarsum)
xconst_is_0 = _is_zero(xconst)
yconst_is_0 = _is_zero(yconst)
# TODO: maybe we don't need the checking here, if we do the decision in the higher-level function M2
# TODO: if not zero, we could still exploit the zeros in const and compute (and write!) this outer product
# TODO: only to a sub-matrix
# Block 12 and 21
if weights is not None:
wsum = np.sum(weights)
xvarsum = np.sum(weights[:, None] * Xvar, axis=0)
yvarsum = np.sum(weights[:, None] * Yvar, axis=0) # depends on [control=['if'], data=['weights']]
else:
wsum = Xvar.shape[0]
if not (xsum_is_0 or yconst_is_0) or not (ysum_is_0 or xconst_is_0):
C[np.ix_(mask_X, ~mask_Y)] = np.outer(xvarsum, yconst)
C[np.ix_(~mask_X, mask_Y)] = np.outer(xconst, yvarsum) # depends on [control=['if'], data=[]]
# Block 22
if not (xconst_is_0 or yconst_is_0):
C[np.ix_(~mask_X, ~mask_Y)] = np.outer(wsum * xconst, yconst) # depends on [control=['if'], data=[]]
return C |
def createKeyPair(type, bits):
    """Generate a fresh public/private key pair.

    Arguments:
        type -- key type, one of TYPE_RSA or TYPE_DSA
        bits -- key length in bits

    Returns:
        A PKey object holding the generated pair.
    """
    # NOTE: the parameter name shadows the builtin ``type``; kept for
    # backward compatibility with keyword-argument callers.
    key = crypto.PKey()
    key.generate_key(type, bits)
    return key
constant[
Create a public/private key pair.
Arguments: type - Key type, must be one of TYPE_RSA and TYPE_DSA
bits - Number of bits to use in the key
Returns: The public/private key pair in a PKey object
]
variable[pkey] assign[=] call[name[crypto].PKey, parameter[]]
call[name[pkey].generate_key, parameter[name[type], name[bits]]]
return[name[pkey]] | keyword[def] identifier[createKeyPair] ( identifier[type] , identifier[bits] ):
literal[string]
identifier[pkey] = identifier[crypto] . identifier[PKey] ()
identifier[pkey] . identifier[generate_key] ( identifier[type] , identifier[bits] )
keyword[return] identifier[pkey] | def createKeyPair(type, bits):
"""
Create a public/private key pair.
Arguments: type - Key type, must be one of TYPE_RSA and TYPE_DSA
bits - Number of bits to use in the key
Returns: The public/private key pair in a PKey object
"""
pkey = crypto.PKey()
pkey.generate_key(type, bits)
return pkey |
def translate_noetic(s):
    """M:.O:.-O:.O:.-B:.T:.n.-' => s.M:O:.O:O:.-"""
    # ``mode`` is part of the triple but unused in this translation.
    subst, attr, mode = s
    subst_pair = m(subst.children[0].children[0], subst.children[1].children[0])
    attr_pair = m(attr.children[0].children[0], attr.children[1].children[0])
    return m(script('s.'), subst_pair, attr_pair)
constant[M:.O:.-O:.O:.-B:.T:.n.-' => s.M:O:.O:O:.-]
<ast.Tuple object at 0x7da1b032c610> assign[=] name[s]
return[call[name[m], parameter[call[name[script], parameter[constant[s.]]], call[name[m], parameter[call[call[name[subst].children][constant[0]].children][constant[0]], call[call[name[subst].children][constant[1]].children][constant[0]]]], call[name[m], parameter[call[call[name[attr].children][constant[0]].children][constant[0]], call[call[name[attr].children][constant[1]].children][constant[0]]]]]]] | keyword[def] identifier[translate_noetic] ( identifier[s] ):
literal[string]
identifier[subst] , identifier[attr] , identifier[mode] = identifier[s]
keyword[return] identifier[m] ( identifier[script] ( literal[string] ),
identifier[m] ( identifier[subst] . identifier[children] [ literal[int] ]. identifier[children] [ literal[int] ], identifier[subst] . identifier[children] [ literal[int] ]. identifier[children] [ literal[int] ]),
identifier[m] ( identifier[attr] . identifier[children] [ literal[int] ]. identifier[children] [ literal[int] ], identifier[attr] . identifier[children] [ literal[int] ]. identifier[children] [ literal[int] ])) | def translate_noetic(s):
"""M:.O:.-O:.O:.-B:.T:.n.-' => s.M:O:.O:O:.-"""
(subst, attr, mode) = s
return m(script('s.'), m(subst.children[0].children[0], subst.children[1].children[0]), m(attr.children[0].children[0], attr.children[1].children[0])) |
def _retrieve_archive():
    """
    Returns a tuple with archive content: the first element is a TodoListBase
    and the second element is a TodoFile.
    """
    # Open the archive file at the configured location and parse its text.
    archive_path = config().archive()
    archive_file = TodoFile.TodoFile(archive_path)
    todolist = TodoListBase.TodoListBase(archive_file.read())
    return (todolist, archive_file)
constant[
Returns a tuple with archive content: the first element is a TodoListBase
and the second element is a TodoFile.
]
variable[archive_file] assign[=] call[name[TodoFile].TodoFile, parameter[call[call[name[config], parameter[]].archive, parameter[]]]]
variable[archive] assign[=] call[name[TodoListBase].TodoListBase, parameter[call[name[archive_file].read, parameter[]]]]
return[tuple[[<ast.Name object at 0x7da1b2346a70>, <ast.Name object at 0x7da1b2344550>]]] | keyword[def] identifier[_retrieve_archive] ():
literal[string]
identifier[archive_file] = identifier[TodoFile] . identifier[TodoFile] ( identifier[config] (). identifier[archive] ())
identifier[archive] = identifier[TodoListBase] . identifier[TodoListBase] ( identifier[archive_file] . identifier[read] ())
keyword[return] ( identifier[archive] , identifier[archive_file] ) | def _retrieve_archive():
"""
Returns a tuple with archive content: the first element is a TodoListBase
and the second element is a TodoFile.
"""
archive_file = TodoFile.TodoFile(config().archive())
archive = TodoListBase.TodoListBase(archive_file.read())
return (archive, archive_file) |
def userlogin(self, event):
    """Checks if an alert is ongoing and alerts the newly connected
    client, if so.

    Registers the connecting client's user under its client uuid, then
    replays every currently active alert to the new client.
    """
    client_uuid = event.clientuuid
    self.log(event.user, pretty=True, lvl=verbose)
    self.log('Adding client')
    self.clients[event.clientuuid] = event.user
    # Only the alert payloads are needed; the topic keys were unused, so
    # iterate .values() instead of .items().
    for alert in self.alerts.values():
        self.alert(client_uuid, alert)
constant[Checks if an alert is ongoing and alerts the newly connected
client, if so.]
variable[client_uuid] assign[=] name[event].clientuuid
call[name[self].log, parameter[name[event].user]]
call[name[self].log, parameter[constant[Adding client]]]
call[name[self].clients][name[event].clientuuid] assign[=] name[event].user
for taget[tuple[[<ast.Name object at 0x7da1b0f0e860>, <ast.Name object at 0x7da1b0f0d690>]]] in starred[call[name[self].alerts.items, parameter[]]] begin[:]
call[name[self].alert, parameter[name[client_uuid], name[alert]]] | keyword[def] identifier[userlogin] ( identifier[self] , identifier[event] ):
literal[string]
identifier[client_uuid] = identifier[event] . identifier[clientuuid]
identifier[self] . identifier[log] ( identifier[event] . identifier[user] , identifier[pretty] = keyword[True] , identifier[lvl] = identifier[verbose] )
identifier[self] . identifier[log] ( literal[string] )
identifier[self] . identifier[clients] [ identifier[event] . identifier[clientuuid] ]= identifier[event] . identifier[user]
keyword[for] identifier[topic] , identifier[alert] keyword[in] identifier[self] . identifier[alerts] . identifier[items] ():
identifier[self] . identifier[alert] ( identifier[client_uuid] , identifier[alert] ) | def userlogin(self, event):
"""Checks if an alert is ongoing and alerts the newly connected
client, if so."""
client_uuid = event.clientuuid
self.log(event.user, pretty=True, lvl=verbose)
self.log('Adding client')
self.clients[event.clientuuid] = event.user
for (topic, alert) in self.alerts.items():
self.alert(client_uuid, alert) # depends on [control=['for'], data=[]] |
def range(self, start, end=None, step=1, numSlices=None):
    """
    Create a new RDD of int with elements from `start` up to `end`
    (exclusive), stepping by `step`. Mirrors python's built-in range()
    function: with a single argument, that argument is taken as `end`
    and `start` defaults to 0.
    :param start: the start value
    :param end: the end value (exclusive)
    :param step: the incremental step (default: 1)
    :param numSlices: the number of partitions of the new RDD
    :return: An RDD of int
    >>> sc.range(5).collect()
    [0, 1, 2, 3, 4]
    >>> sc.range(2, 4).collect()
    [2, 3]
    >>> sc.range(1, 7, 2).collect()
    [1, 3, 5]
    """
    # Single-argument form: range(n) means range(0, n).
    if end is None:
        start, end = 0, start
    return self.parallelize(xrange(start, end, step), numSlices)
constant[
Create a new RDD of int containing elements from `start` to `end`
(exclusive), increased by `step` every element. Can be called the same
way as python's built-in range() function. If called with a single argument,
the argument is interpreted as `end`, and `start` is set to 0.
:param start: the start value
:param end: the end value (exclusive)
:param step: the incremental step (default: 1)
:param numSlices: the number of partitions of the new RDD
:return: An RDD of int
>>> sc.range(5).collect()
[0, 1, 2, 3, 4]
>>> sc.range(2, 4).collect()
[2, 3]
>>> sc.range(1, 7, 2).collect()
[1, 3, 5]
]
if compare[name[end] is constant[None]] begin[:]
variable[end] assign[=] name[start]
variable[start] assign[=] constant[0]
return[call[name[self].parallelize, parameter[call[name[xrange], parameter[name[start], name[end], name[step]]], name[numSlices]]]] | keyword[def] identifier[range] ( identifier[self] , identifier[start] , identifier[end] = keyword[None] , identifier[step] = literal[int] , identifier[numSlices] = keyword[None] ):
literal[string]
keyword[if] identifier[end] keyword[is] keyword[None] :
identifier[end] = identifier[start]
identifier[start] = literal[int]
keyword[return] identifier[self] . identifier[parallelize] ( identifier[xrange] ( identifier[start] , identifier[end] , identifier[step] ), identifier[numSlices] ) | def range(self, start, end=None, step=1, numSlices=None):
"""
Create a new RDD of int containing elements from `start` to `end`
(exclusive), increased by `step` every element. Can be called the same
way as python's built-in range() function. If called with a single argument,
the argument is interpreted as `end`, and `start` is set to 0.
:param start: the start value
:param end: the end value (exclusive)
:param step: the incremental step (default: 1)
:param numSlices: the number of partitions of the new RDD
:return: An RDD of int
>>> sc.range(5).collect()
[0, 1, 2, 3, 4]
>>> sc.range(2, 4).collect()
[2, 3]
>>> sc.range(1, 7, 2).collect()
[1, 3, 5]
"""
if end is None:
end = start
start = 0 # depends on [control=['if'], data=['end']]
return self.parallelize(xrange(start, end, step), numSlices) |
def DbPutDeviceAlias(self, argin):
    """ Define alias for a given device name
    :param argin: Str[0] = device name
    Str[1] = alias name
    :type: tango.DevVarStringArray
    :return:
    :rtype: tango.DevVoid """
    self._log.debug("In DbPutDeviceAlias()")
    # Guard: both the device name and the alias must be supplied;
    # th_exc reports the incorrect-arguments error to the caller.
    if len(argin) < 2:
        self.warn_stream("DataBase::DbPutDeviceAlias(): insufficient number of arguments ")
        th_exc(DB_IncorrectArguments,
               "insufficient number of arguments to put device alias",
               "DataBase::DbPutDeviceAlias()")
    device_name, device_alias = argin[0], argin[1]
    self.db.put_device_alias(device_name, device_alias)
constant[ Define alias for a given device name
:param argin: Str[0] = device name
Str[1] = alias name
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid ]
call[name[self]._log.debug, parameter[constant[In DbPutDeviceAlias()]]]
if compare[call[name[len], parameter[name[argin]]] less[<] constant[2]] begin[:]
call[name[self].warn_stream, parameter[constant[DataBase::DbPutDeviceAlias(): insufficient number of arguments ]]]
call[name[th_exc], parameter[name[DB_IncorrectArguments], constant[insufficient number of arguments to put device alias], constant[DataBase::DbPutDeviceAlias()]]]
variable[device_name] assign[=] call[name[argin]][constant[0]]
variable[device_alias] assign[=] call[name[argin]][constant[1]]
call[name[self].db.put_device_alias, parameter[name[device_name], name[device_alias]]] | keyword[def] identifier[DbPutDeviceAlias] ( identifier[self] , identifier[argin] ):
literal[string]
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
keyword[if] identifier[len] ( identifier[argin] )< literal[int] :
identifier[self] . identifier[warn_stream] ( literal[string] )
identifier[th_exc] ( identifier[DB_IncorrectArguments] ,
literal[string] ,
literal[string] )
identifier[device_name] = identifier[argin] [ literal[int] ]
identifier[device_alias] = identifier[argin] [ literal[int] ]
identifier[self] . identifier[db] . identifier[put_device_alias] ( identifier[device_name] , identifier[device_alias] ) | def DbPutDeviceAlias(self, argin):
""" Define alias for a given device name
:param argin: Str[0] = device name
Str[1] = alias name
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid """
self._log.debug('In DbPutDeviceAlias()')
if len(argin) < 2:
self.warn_stream('DataBase::DbPutDeviceAlias(): insufficient number of arguments ')
th_exc(DB_IncorrectArguments, 'insufficient number of arguments to put device alias', 'DataBase::DbPutDeviceAlias()') # depends on [control=['if'], data=[]]
device_name = argin[0]
device_alias = argin[1]
self.db.put_device_alias(device_name, device_alias) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.