code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def getattr_with_deprecated_properties(obj, item, deprecated_properties):
    """Helper method to use in the getattr method of a class with deprecated properties.

    :param obj: Instance of the Class containing the deprecated properties in question.
    :type obj: object
    :param item: Name of the attribute being requested.
    :type item: str
    :param deprecated_properties: Mapping of deprecated property name to a dict with at
        least a "to_be_removed_in_version" and "client_property" key (and optionally a
        "new_property" key) used to build the displayed deprecation warning.
    :type deprecated_properties: Dict[str, dict]
    :return: The new property indicated where available.
    :rtype: object
    :raises AttributeError: if ``item`` is not a known deprecated property.
    """
    if item in deprecated_properties:
        info = deprecated_properties[item]
        deprecation_message = generate_property_deprecation_message(
            to_be_removed_in_version=info['to_be_removed_in_version'],
            old_name=item,
            new_name=info.get('new_property', item),
            new_attribute=info['client_property'],
        )
        # Force the warning to always be emitted, but scope the filter change so
        # the caller's process-wide warning configuration is restored afterwards
        # (the previous simplefilter('default') reset any user-set filters).
        with warnings.catch_warnings():
            warnings.simplefilter('always', DeprecationWarning)
            warnings.warn(
                message=deprecation_message,
                category=DeprecationWarning,
                stacklevel=2,
            )
        # Resolve the replacement attribute: obj.<client_property>.<new_property-or-item>
        client_property = getattr(obj, info['client_property'])
        return getattr(client_property, info.get('new_property', item))
    raise AttributeError("'{class_name}' has no attribute '{item}'".format(
        class_name=obj.__class__.__name__,
        item=item,
    ))
constant[Helper method to use in the getattr method of a class with deprecated properties.
:param obj: Instance of the Class containing the deprecated properties in question.
:type obj: object
:param item: Name of the attribute being requested.
:type item: str
:param deprecated_properties: List of deprecated properties. Each item in the list is a dict with at least a
"to_be_removed_in_version" and "client_property" key to be used in the displayed deprecation warning.
:type deprecated_properties: List[dict]
:return: The new property indicated where available.
:rtype: object
]
if compare[name[item] in name[deprecated_properties]] begin[:]
variable[deprecation_message] assign[=] call[name[generate_property_deprecation_message], parameter[]]
call[name[warnings].simplefilter, parameter[constant[always], name[DeprecationWarning]]]
call[name[warnings].warn, parameter[]]
call[name[warnings].simplefilter, parameter[constant[default], name[DeprecationWarning]]]
variable[client_property] assign[=] call[name[getattr], parameter[name[obj], call[call[name[deprecated_properties]][name[item]]][constant[client_property]]]]
return[call[name[getattr], parameter[name[client_property], call[call[name[deprecated_properties]][name[item]].get, parameter[constant[new_property], name[item]]]]]]
<ast.Raise object at 0x7da20e963d30> | keyword[def] identifier[getattr_with_deprecated_properties] ( identifier[obj] , identifier[item] , identifier[deprecated_properties] ):
literal[string]
keyword[if] identifier[item] keyword[in] identifier[deprecated_properties] :
identifier[deprecation_message] = identifier[generate_property_deprecation_message] (
identifier[to_be_removed_in_version] = identifier[deprecated_properties] [ identifier[item] ][ literal[string] ],
identifier[old_name] = identifier[item] ,
identifier[new_name] = identifier[deprecated_properties] [ identifier[item] ]. identifier[get] ( literal[string] , identifier[item] ),
identifier[new_attribute] = identifier[deprecated_properties] [ identifier[item] ][ literal[string] ],
)
identifier[warnings] . identifier[simplefilter] ( literal[string] , identifier[DeprecationWarning] )
identifier[warnings] . identifier[warn] (
identifier[message] = identifier[deprecation_message] ,
identifier[category] = identifier[DeprecationWarning] ,
identifier[stacklevel] = literal[int] ,
)
identifier[warnings] . identifier[simplefilter] ( literal[string] , identifier[DeprecationWarning] )
identifier[client_property] = identifier[getattr] ( identifier[obj] , identifier[deprecated_properties] [ identifier[item] ][ literal[string] ])
keyword[return] identifier[getattr] ( identifier[client_property] , identifier[deprecated_properties] [ identifier[item] ]. identifier[get] ( literal[string] , identifier[item] ))
keyword[raise] identifier[AttributeError] ( literal[string] . identifier[format] (
identifier[class_name] = identifier[obj] . identifier[__class__] . identifier[__name__] ,
identifier[item] = identifier[item] ,
)) | def getattr_with_deprecated_properties(obj, item, deprecated_properties):
"""Helper method to use in the getattr method of a class with deprecated properties.
:param obj: Instance of the Class containing the deprecated properties in question.
:type obj: object
:param item: Name of the attribute being requested.
:type item: str
:param deprecated_properties: List of deprecated properties. Each item in the list is a dict with at least a
"to_be_removed_in_version" and "client_property" key to be used in the displayed deprecation warning.
:type deprecated_properties: List[dict]
:return: The new property indicated where available.
:rtype: object
"""
if item in deprecated_properties:
deprecation_message = generate_property_deprecation_message(to_be_removed_in_version=deprecated_properties[item]['to_be_removed_in_version'], old_name=item, new_name=deprecated_properties[item].get('new_property', item), new_attribute=deprecated_properties[item]['client_property'])
warnings.simplefilter('always', DeprecationWarning)
warnings.warn(message=deprecation_message, category=DeprecationWarning, stacklevel=2)
warnings.simplefilter('default', DeprecationWarning)
client_property = getattr(obj, deprecated_properties[item]['client_property'])
return getattr(client_property, deprecated_properties[item].get('new_property', item)) # depends on [control=['if'], data=['item', 'deprecated_properties']]
raise AttributeError("'{class_name}' has no attribute '{item}'".format(class_name=obj.__class__.__name__, item=item)) |
def _send(self, message):
    """Deliver a single SMS through the Esendex HTTP form API.

    :param SmsMessage message: the message to deliver.
    :returns: True if message is sent else False
    :rtype: bool
    """
    payload = {
        'EsendexUsername': self.get_username(),
        'EsendexPassword': self.get_password(),
        'EsendexAccount': self.get_account(),
        'EsendexOriginator': message.from_phone,
        'EsendexRecipient': ",".join(message.to),
        'EsendexBody': message.body,
        'EsendexPlainText': '1',
    }
    if ESENDEX_SANDBOX:
        payload['EsendexTest'] = '1'

    http_response = requests.post(ESENDEX_API_URL, payload)

    # Transport-level failure: non-200 status.
    if http_response.status_code != 200:
        if self.fail_silently:
            return False
        raise Exception('Bad status code')

    # The API reports outcomes as a plain-text body starting with 'Result'.
    if not http_response.content.startswith(b'Result'):
        if self.fail_silently:
            return False
        raise Exception('Bad result')

    parsed = self._parse_response(http_response.content.decode('utf8'))
    if ESENDEX_SANDBOX and parsed['Result'] == 'Test':
        return True
    if parsed['Result'].startswith('OK'):
        return True
    if not self.fail_silently:
        raise Exception('Bad result')
    return False
constant[
Private method to send one message.
:param SmsMessage message: SmsMessage class instance.
:returns: True if message is sent else False
:rtype: bool
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18bc73cd0>, <ast.Constant object at 0x7da18bc733d0>, <ast.Constant object at 0x7da18bc72200>, <ast.Constant object at 0x7da18bc70280>, <ast.Constant object at 0x7da18bc73070>, <ast.Constant object at 0x7da18bc73fd0>, <ast.Constant object at 0x7da18bc73640>], [<ast.Call object at 0x7da18bc725c0>, <ast.Call object at 0x7da18bc70d90>, <ast.Call object at 0x7da18bc72230>, <ast.Attribute object at 0x7da18bc73310>, <ast.Call object at 0x7da18bc73e80>, <ast.Attribute object at 0x7da18bc73550>, <ast.Constant object at 0x7da18bc71f00>]]
if name[ESENDEX_SANDBOX] begin[:]
call[name[params]][constant[EsendexTest]] assign[=] constant[1]
variable[response] assign[=] call[name[requests].post, parameter[name[ESENDEX_API_URL], name[params]]]
if compare[name[response].status_code not_equal[!=] constant[200]] begin[:]
if <ast.UnaryOp object at 0x7da18bc730d0> begin[:]
<ast.Raise object at 0x7da18bc71390>
if <ast.UnaryOp object at 0x7da18bc73820> begin[:]
if <ast.UnaryOp object at 0x7da18bc70d60> begin[:]
<ast.Raise object at 0x7da18bc71540>
variable[response] assign[=] call[name[self]._parse_response, parameter[call[name[response].content.decode, parameter[constant[utf8]]]]]
if <ast.BoolOp object at 0x7da18bc71db0> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[_send] ( identifier[self] , identifier[message] ):
literal[string]
identifier[params] ={
literal[string] : identifier[self] . identifier[get_username] (),
literal[string] : identifier[self] . identifier[get_password] (),
literal[string] : identifier[self] . identifier[get_account] (),
literal[string] : identifier[message] . identifier[from_phone] ,
literal[string] : literal[string] . identifier[join] ( identifier[message] . identifier[to] ),
literal[string] : identifier[message] . identifier[body] ,
literal[string] : literal[string]
}
keyword[if] identifier[ESENDEX_SANDBOX] :
identifier[params] [ literal[string] ]= literal[string]
identifier[response] = identifier[requests] . identifier[post] ( identifier[ESENDEX_API_URL] , identifier[params] )
keyword[if] identifier[response] . identifier[status_code] != literal[int] :
keyword[if] keyword[not] identifier[self] . identifier[fail_silently] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[else] :
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[response] . identifier[content] . identifier[startswith] ( literal[string] ):
keyword[if] keyword[not] identifier[self] . identifier[fail_silently] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[else] :
keyword[return] keyword[False]
identifier[response] = identifier[self] . identifier[_parse_response] ( identifier[response] . identifier[content] . identifier[decode] ( literal[string] ))
keyword[if] identifier[ESENDEX_SANDBOX] keyword[and] identifier[response] [ literal[string] ]== literal[string] :
keyword[return] keyword[True]
keyword[else] :
keyword[if] identifier[response] [ literal[string] ]. identifier[startswith] ( literal[string] ):
keyword[return] keyword[True]
keyword[else] :
keyword[if] keyword[not] identifier[self] . identifier[fail_silently] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[return] keyword[False] | def _send(self, message):
"""
Private method to send one message.
:param SmsMessage message: SmsMessage class instance.
:returns: True if message is sent else False
:rtype: bool
"""
params = {'EsendexUsername': self.get_username(), 'EsendexPassword': self.get_password(), 'EsendexAccount': self.get_account(), 'EsendexOriginator': message.from_phone, 'EsendexRecipient': ','.join(message.to), 'EsendexBody': message.body, 'EsendexPlainText': '1'}
if ESENDEX_SANDBOX:
params['EsendexTest'] = '1' # depends on [control=['if'], data=[]]
response = requests.post(ESENDEX_API_URL, params)
if response.status_code != 200:
if not self.fail_silently:
raise Exception('Bad status code') # depends on [control=['if'], data=[]]
else:
return False # depends on [control=['if'], data=[]]
if not response.content.startswith(b'Result'):
if not self.fail_silently:
raise Exception('Bad result') # depends on [control=['if'], data=[]]
else:
return False # depends on [control=['if'], data=[]]
response = self._parse_response(response.content.decode('utf8'))
if ESENDEX_SANDBOX and response['Result'] == 'Test':
return True # depends on [control=['if'], data=[]]
elif response['Result'].startswith('OK'):
return True # depends on [control=['if'], data=[]]
elif not self.fail_silently:
raise Exception('Bad result') # depends on [control=['if'], data=[]]
return False |
def get_gene_modification_language(identifier_qualified: ParserElement) -> ParserElement:
    """Construct the parser fragment for gene-modification (gmod) expressions."""
    # Accept either a fully-qualified identifier or a default-namespace gmod name.
    name_alternatives = MatchFirst([identifier_qualified, gmod_default_ns])
    grouped_identifier = Group(name_alternatives)(IDENTIFIER)
    return gmod_tag + nest(grouped_identifier)
constant[Build a gene modification parser.]
variable[gmod_identifier] assign[=] call[name[MatchFirst], parameter[list[[<ast.Name object at 0x7da207f9bac0>, <ast.Name object at 0x7da207f9a860>]]]]
return[binary_operation[name[gmod_tag] + call[name[nest], parameter[call[call[name[Group], parameter[name[gmod_identifier]]], parameter[name[IDENTIFIER]]]]]]] | keyword[def] identifier[get_gene_modification_language] ( identifier[identifier_qualified] : identifier[ParserElement] )-> identifier[ParserElement] :
literal[string]
identifier[gmod_identifier] = identifier[MatchFirst] ([
identifier[identifier_qualified] ,
identifier[gmod_default_ns] ,
])
keyword[return] identifier[gmod_tag] + identifier[nest] (
identifier[Group] ( identifier[gmod_identifier] )( identifier[IDENTIFIER] )
) | def get_gene_modification_language(identifier_qualified: ParserElement) -> ParserElement:
"""Build a gene modification parser."""
gmod_identifier = MatchFirst([identifier_qualified, gmod_default_ns])
return gmod_tag + nest(Group(gmod_identifier)(IDENTIFIER)) |
def _to_json_default(obj):
"""Helper to convert non default objects to json.
Usage:
simplejson.dumps(data, default=_to_json_default)
"""
# Datetime
if isinstance(obj, datetime.datetime):
return obj.isoformat()
# UUID
if isinstance(obj, uuid.UUID):
return str(obj)
# numpy
if hasattr(obj, 'item'):
return obj.item()
# # Enum
# if hasattr(obj, 'value'):
# return obj.value
try:
return obj.id
except Exception:
raise TypeError('{obj} is not JSON serializable'.format(obj=repr(obj))) | def function[_to_json_default, parameter[obj]]:
constant[Helper to convert non default objects to json.
Usage:
simplejson.dumps(data, default=_to_json_default)
]
if call[name[isinstance], parameter[name[obj], name[datetime].datetime]] begin[:]
return[call[name[obj].isoformat, parameter[]]]
if call[name[isinstance], parameter[name[obj], name[uuid].UUID]] begin[:]
return[call[name[str], parameter[name[obj]]]]
if call[name[hasattr], parameter[name[obj], constant[item]]] begin[:]
return[call[name[obj].item, parameter[]]]
<ast.Try object at 0x7da20e955780> | keyword[def] identifier[_to_json_default] ( identifier[obj] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[datetime] . identifier[datetime] ):
keyword[return] identifier[obj] . identifier[isoformat] ()
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[uuid] . identifier[UUID] ):
keyword[return] identifier[str] ( identifier[obj] )
keyword[if] identifier[hasattr] ( identifier[obj] , literal[string] ):
keyword[return] identifier[obj] . identifier[item] ()
keyword[try] :
keyword[return] identifier[obj] . identifier[id]
keyword[except] identifier[Exception] :
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[obj] = identifier[repr] ( identifier[obj] ))) | def _to_json_default(obj):
"""Helper to convert non default objects to json.
Usage:
simplejson.dumps(data, default=_to_json_default)
"""
# Datetime
if isinstance(obj, datetime.datetime):
return obj.isoformat() # depends on [control=['if'], data=[]]
# UUID
if isinstance(obj, uuid.UUID):
return str(obj) # depends on [control=['if'], data=[]]
# numpy
if hasattr(obj, 'item'):
return obj.item() # depends on [control=['if'], data=[]]
# # Enum
# if hasattr(obj, 'value'):
# return obj.value
try:
return obj.id # depends on [control=['try'], data=[]]
except Exception:
raise TypeError('{obj} is not JSON serializable'.format(obj=repr(obj))) # depends on [control=['except'], data=[]] |
def __decodeModifier(self, keyCode):
    """
    Map a raw key code to its modifier-name constant.

    Returns the modifier name (as defined in the iomediator module) when the
    given keyCode belongs to a modifier key, otherwise C{None}.
    """
    candidate = self.lookup_string(keyCode, False, False, False)
    return candidate if candidate in MODIFIERS else None
constant[
Checks if the given keyCode is a modifier key. If it is, returns the modifier name
constant as defined in the iomediator module. If not, returns C{None}
]
variable[keyName] assign[=] call[name[self].lookup_string, parameter[name[keyCode], constant[False], constant[False], constant[False]]]
if compare[name[keyName] in name[MODIFIERS]] begin[:]
return[name[keyName]]
return[constant[None]] | keyword[def] identifier[__decodeModifier] ( identifier[self] , identifier[keyCode] ):
literal[string]
identifier[keyName] = identifier[self] . identifier[lookup_string] ( identifier[keyCode] , keyword[False] , keyword[False] , keyword[False] )
keyword[if] identifier[keyName] keyword[in] identifier[MODIFIERS] :
keyword[return] identifier[keyName]
keyword[return] keyword[None] | def __decodeModifier(self, keyCode):
"""
Checks if the given keyCode is a modifier key. If it is, returns the modifier name
constant as defined in the iomediator module. If not, returns C{None}
"""
keyName = self.lookup_string(keyCode, False, False, False)
if keyName in MODIFIERS:
return keyName # depends on [control=['if'], data=['keyName']]
return None |
def predict(self, t, period=None):
    """Evaluate the best-fit model at the given times.

    Parameters
    ----------
    t : float or array_like
        times at which to predict
    period : float (optional)
        period at which to compute the model; when omitted, the period found
        by the optimizer supplied at initialization is used.

    Returns
    -------
    y : np.ndarray
        predicted model values at times t, matching the shape of ``t``
    """
    times = np.asarray(t)
    chosen_period = self.best_period if period is None else period
    # The underlying model works on a flat time array; restore the caller's
    # shape afterwards.
    flat_prediction = self._predict(times.ravel(), period=chosen_period)
    return flat_prediction.reshape(times.shape)
constant[Compute the best-fit model at ``t`` for a given period
Parameters
----------
t : float or array_like
times at which to predict
period : float (optional)
The period at which to compute the model. If not specified, it
will be computed via the optimizer provided at initialization.
Returns
-------
y : np.ndarray
predicted model values at times t
]
variable[t] assign[=] call[name[np].asarray, parameter[name[t]]]
if compare[name[period] is constant[None]] begin[:]
variable[period] assign[=] name[self].best_period
variable[result] assign[=] call[name[self]._predict, parameter[call[name[t].ravel, parameter[]]]]
return[call[name[result].reshape, parameter[name[t].shape]]] | keyword[def] identifier[predict] ( identifier[self] , identifier[t] , identifier[period] = keyword[None] ):
literal[string]
identifier[t] = identifier[np] . identifier[asarray] ( identifier[t] )
keyword[if] identifier[period] keyword[is] keyword[None] :
identifier[period] = identifier[self] . identifier[best_period]
identifier[result] = identifier[self] . identifier[_predict] ( identifier[t] . identifier[ravel] (), identifier[period] = identifier[period] )
keyword[return] identifier[result] . identifier[reshape] ( identifier[t] . identifier[shape] ) | def predict(self, t, period=None):
"""Compute the best-fit model at ``t`` for a given period
Parameters
----------
t : float or array_like
times at which to predict
period : float (optional)
The period at which to compute the model. If not specified, it
will be computed via the optimizer provided at initialization.
Returns
-------
y : np.ndarray
predicted model values at times t
"""
t = np.asarray(t)
if period is None:
period = self.best_period # depends on [control=['if'], data=['period']]
result = self._predict(t.ravel(), period=period)
return result.reshape(t.shape) |
def show(db, encoding, no_limit, zip, case_insensitive):
    """Show .dbf file contents (rows)."""
    # NOTE(review): ``zip`` shadows the builtin, but it is part of the public
    # (CLI-mapped) signature and cannot be renamed here.
    row_cap = float('inf') if no_limit else 15
    with open_db(db, zip, encoding=encoding, case_sensitive=not case_insensitive) as dbf:
        for row_number, record in enumerate(dbf, 1):
            click.secho('')
            for field_name, field_value in record._asdict().items():
                click.secho(' %s: %s' % (field_name, field_value))
            if row_number == row_cap:
                click.secho(
                    'Note: Output is limited to %s rows. Use --no-limit option to bypass.' % row_cap, fg='red')
                break
constant[Show .dbf file contents (rows).]
variable[limit] assign[=] constant[15]
if name[no_limit] begin[:]
variable[limit] assign[=] call[name[float], parameter[constant[inf]]]
with call[name[open_db], parameter[name[db], name[zip]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0a6ca30>, <ast.Name object at 0x7da1b0a6d8d0>]]] in starred[call[name[enumerate], parameter[name[dbf], constant[1]]]] begin[:]
call[name[click].secho, parameter[constant[]]]
for taget[tuple[[<ast.Name object at 0x7da1b0a6c610>, <ast.Name object at 0x7da1b0a6c880>]]] in starred[call[call[name[row]._asdict, parameter[]].items, parameter[]]] begin[:]
call[name[click].secho, parameter[binary_operation[constant[ %s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0a6ca90>, <ast.Name object at 0x7da1b0a6d390>]]]]]
if compare[name[idx] equal[==] name[limit]] begin[:]
call[name[click].secho, parameter[binary_operation[constant[Note: Output is limited to %s rows. Use --no-limit option to bypass.] <ast.Mod object at 0x7da2590d6920> name[limit]]]]
break | keyword[def] identifier[show] ( identifier[db] , identifier[encoding] , identifier[no_limit] , identifier[zip] , identifier[case_insensitive] ):
literal[string]
identifier[limit] = literal[int]
keyword[if] identifier[no_limit] :
identifier[limit] = identifier[float] ( literal[string] )
keyword[with] identifier[open_db] ( identifier[db] , identifier[zip] , identifier[encoding] = identifier[encoding] , identifier[case_sensitive] = keyword[not] identifier[case_insensitive] ) keyword[as] identifier[dbf] :
keyword[for] identifier[idx] , identifier[row] keyword[in] identifier[enumerate] ( identifier[dbf] , literal[int] ):
identifier[click] . identifier[secho] ( literal[string] )
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[row] . identifier[_asdict] (). identifier[items] ():
identifier[click] . identifier[secho] ( literal[string] %( identifier[key] , identifier[val] ))
keyword[if] identifier[idx] == identifier[limit] :
identifier[click] . identifier[secho] (
literal[string] % identifier[limit] , identifier[fg] = literal[string] )
keyword[break] | def show(db, encoding, no_limit, zip, case_insensitive):
"""Show .dbf file contents (rows)."""
limit = 15
if no_limit:
limit = float('inf') # depends on [control=['if'], data=[]]
with open_db(db, zip, encoding=encoding, case_sensitive=not case_insensitive) as dbf:
for (idx, row) in enumerate(dbf, 1):
click.secho('')
for (key, val) in row._asdict().items():
click.secho(' %s: %s' % (key, val)) # depends on [control=['for'], data=[]]
if idx == limit:
click.secho('Note: Output is limited to %s rows. Use --no-limit option to bypass.' % limit, fg='red')
break # depends on [control=['if'], data=['limit']] # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['dbf']] |
def split_sparse_matrix(matrix, num_categories):
    """
    An analog of numpy.split for our sparse matrix. If the number of
    categories does not divide the number of rows in the matrix, all
    overflow is placed in the final bin.

    In the event that there are more categories than rows, all later
    categories are considered to be an empty sparse matrix.

    :param matrix: sparse matrix supporting nRows()/nCols()/getSlice()
    :param num_categories: number of row bins to split the matrix into
    :return: list of sparse-matrix slices, one per category
    """
    n_rows = matrix.nRows()
    n_cols = matrix.nCols()
    if n_rows < num_categories:
        # One single-row slice per row, padded with empty matrices.
        return ([matrix.getSlice(i, i + 1, 0, n_cols) for i in range(n_rows)] +
                [SM32() for _ in range(num_categories - n_rows)])
    # BUG FIX: use floor division so slice bounds stay integral under Python 3
    # (true division yields floats, which breaks getSlice); identical result
    # under Python 2 integer division.
    inc = n_rows // num_categories
    divisions = [matrix.getSlice(i * inc, (i + 1) * inc, 0, n_cols)
                 for i in range(num_categories - 1)]
    # Handle the last bin separately. All overflow goes into it.
    divisions.append(matrix.getSlice((num_categories - 1) * inc, n_rows, 0, n_cols))
    return divisions
constant[
An analog of numpy.split for our sparse matrix. If the number of
categories does not divide the number of rows in the matrix, all
overflow is placed in the final bin.
In the event that there are more categories than rows, all later
categories are considered to be an empty sparse matrix.
]
if compare[call[name[matrix].nRows, parameter[]] less[<] name[num_categories]] begin[:]
return[binary_operation[<ast.ListComp object at 0x7da1b08b1930> + <ast.ListComp object at 0x7da1b08b2a70>]] | keyword[def] identifier[split_sparse_matrix] ( identifier[matrix] , identifier[num_categories] ):
literal[string]
keyword[if] identifier[matrix] . identifier[nRows] ()< identifier[num_categories] :
keyword[return] [ identifier[matrix] . identifier[getSlice] ( identifier[i] , identifier[i] + literal[int] , literal[int] , identifier[matrix] . identifier[nCols] ())
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[matrix] . identifier[nRows] ())]+[ identifier[SM32] ()
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_categories] - identifier[matrix] . identifier[nRows] ())]
keyword[else] :
identifier[inc] = identifier[matrix] . identifier[nRows] ()/ identifier[num_categories]
identifier[divisions] =[ identifier[matrix] . identifier[getSlice] ( identifier[i] * identifier[inc] ,( identifier[i] + literal[int] )* identifier[inc] , literal[int] , identifier[matrix] . identifier[nCols] ())
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_categories] - literal[int] )]
identifier[divisions] . identifier[append] ( identifier[matrix] . identifier[getSlice] (( identifier[num_categories] - literal[int] )* identifier[inc] , identifier[matrix] . identifier[nRows] (),
literal[int] , identifier[matrix] . identifier[nCols] ()))
keyword[return] identifier[divisions] | def split_sparse_matrix(matrix, num_categories):
"""
An analog of numpy.split for our sparse matrix. If the number of
categories does not divide the number of rows in the matrix, all
overflow is placed in the final bin.
In the event that there are more categories than rows, all later
categories are considered to be an empty sparse matrix.
"""
if matrix.nRows() < num_categories:
return [matrix.getSlice(i, i + 1, 0, matrix.nCols()) for i in range(matrix.nRows())] + [SM32() for i in range(num_categories - matrix.nRows())] # depends on [control=['if'], data=['num_categories']]
else:
inc = matrix.nRows() / num_categories
divisions = [matrix.getSlice(i * inc, (i + 1) * inc, 0, matrix.nCols()) for i in range(num_categories - 1)]
# Handle the last bin separately. All overflow goes into it.
divisions.append(matrix.getSlice((num_categories - 1) * inc, matrix.nRows(), 0, matrix.nCols()))
return divisions |
def _compute_ymean(self, **kwargs):
"""Compute the (weighted) mean of the y data"""
y = np.asarray(kwargs.get('y', self.y))
dy = np.asarray(kwargs.get('dy', self.dy))
if dy.size == 1:
return np.mean(y)
else:
return np.average(y, weights=1 / dy ** 2) | def function[_compute_ymean, parameter[self]]:
constant[Compute the (weighted) mean of the y data]
variable[y] assign[=] call[name[np].asarray, parameter[call[name[kwargs].get, parameter[constant[y], name[self].y]]]]
variable[dy] assign[=] call[name[np].asarray, parameter[call[name[kwargs].get, parameter[constant[dy], name[self].dy]]]]
if compare[name[dy].size equal[==] constant[1]] begin[:]
return[call[name[np].mean, parameter[name[y]]]] | keyword[def] identifier[_compute_ymean] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[y] = identifier[np] . identifier[asarray] ( identifier[kwargs] . identifier[get] ( literal[string] , identifier[self] . identifier[y] ))
identifier[dy] = identifier[np] . identifier[asarray] ( identifier[kwargs] . identifier[get] ( literal[string] , identifier[self] . identifier[dy] ))
keyword[if] identifier[dy] . identifier[size] == literal[int] :
keyword[return] identifier[np] . identifier[mean] ( identifier[y] )
keyword[else] :
keyword[return] identifier[np] . identifier[average] ( identifier[y] , identifier[weights] = literal[int] / identifier[dy] ** literal[int] ) | def _compute_ymean(self, **kwargs):
"""Compute the (weighted) mean of the y data"""
y = np.asarray(kwargs.get('y', self.y))
dy = np.asarray(kwargs.get('dy', self.dy))
if dy.size == 1:
return np.mean(y) # depends on [control=['if'], data=[]]
else:
return np.average(y, weights=1 / dy ** 2) |
def _end_channel(self, channel):
    """
    Gracefully shut down one ssh channel: flag its writer thread to stop
    once the outgoing message queue has drained, then wait for it.
    """
    # The writer loop checks this flag and exits when its queue is empty,
    # so joining below cannot block forever.
    self.stop_on_empty_queue[channel] = True
    writer_thread = self.thread_write_instances[channel]
    thread_join_non_blocking(writer_thread)
constant[
Soft end of ssh channel. End the writing thread as soon as the message queue is empty.
]
call[name[self].stop_on_empty_queue][name[channel]] assign[=] constant[True]
variable[write_thread] assign[=] call[name[self].thread_write_instances][name[channel]]
call[name[thread_join_non_blocking], parameter[name[write_thread]]] | keyword[def] identifier[_end_channel] ( identifier[self] , identifier[channel] ):
literal[string]
identifier[self] . identifier[stop_on_empty_queue] [ identifier[channel] ]= keyword[True]
identifier[write_thread] = identifier[self] . identifier[thread_write_instances] [ identifier[channel] ]
identifier[thread_join_non_blocking] ( identifier[write_thread] ) | def _end_channel(self, channel):
"""
Soft end of ssh channel. End the writing thread as soon as the message queue is empty.
"""
self.stop_on_empty_queue[channel] = True
# by joining the we wait until its loop finishes.
# it won't loop forever since we've set self.stop_on_empty_queue=True
write_thread = self.thread_write_instances[channel]
thread_join_non_blocking(write_thread) |
def is_switched_on(self, refresh=False):
    """Report whether the device is armed.

    When ``refresh`` is True the state is re-read from Vera first; otherwise
    the locally cached value is used. A refresh is only needed when not
    using subscriptions.
    """
    if refresh:
        self.refresh()
    return self.get_value('Armed') == '1'
constant[Get armed state.
Refresh data from Vera if refresh is True, otherwise use local cache.
Refresh is only needed if you're not using subscriptions.
]
if name[refresh] begin[:]
call[name[self].refresh, parameter[]]
variable[val] assign[=] call[name[self].get_value, parameter[constant[Armed]]]
return[compare[name[val] equal[==] constant[1]]] | keyword[def] identifier[is_switched_on] ( identifier[self] , identifier[refresh] = keyword[False] ):
literal[string]
keyword[if] identifier[refresh] :
identifier[self] . identifier[refresh] ()
identifier[val] = identifier[self] . identifier[get_value] ( literal[string] )
keyword[return] identifier[val] == literal[string] | def is_switched_on(self, refresh=False):
"""Get armed state.
Refresh data from Vera if refresh is True, otherwise use local cache.
Refresh is only needed if you're not using subscriptions.
"""
if refresh:
self.refresh() # depends on [control=['if'], data=[]]
val = self.get_value('Armed')
return val == '1' |
def task_list(self, pending=True):
"""
Return the list of scoped tasks (ie tasks that have
appropriate roles set) in correct execution order.
The result is a list of task objects.
"""
log = self._params.get('log', self._discard)
tasks = [t for t in self._tasks if t.participant()]
requires = {}
for t in tasks:
requires[t] = t.get_requires(pending=pending)
done = set()
start_order = []
cycle = 0
while len(tasks) > len(start_order):
cycle += 1
changed = False
for t in tasks:
if t._name in done:
continue
needs = 0
for req in requires[t]:
if req._name in done:
needs += 1
if needs == len(requires[t]):
changed = True
start_order.append(t)
done.add(t._name)
log.debug("Found '%s' in scope", t._name)
if not changed:
log.error("Cycle %d failed after %s", cycle, [t._name for t in set(tasks).difference(done)])
raise TaskError(None, "At cycle %d, startup order conflict, processed %s, remaining %s" %
(cycle, done, [t._name for t in set(tasks).difference(done)]))
log.debug("Cycle %d gave %s", cycle, [t._name for t in start_order])
return start_order | def function[task_list, parameter[self, pending]]:
constant[
Return the list of scoped tasks (ie tasks that have
appropriate roles set) in correct execution order.
The result is a list of task objects.
]
variable[log] assign[=] call[name[self]._params.get, parameter[constant[log], name[self]._discard]]
variable[tasks] assign[=] <ast.ListComp object at 0x7da204346770>
variable[requires] assign[=] dictionary[[], []]
for taget[name[t]] in starred[name[tasks]] begin[:]
call[name[requires]][name[t]] assign[=] call[name[t].get_requires, parameter[]]
variable[done] assign[=] call[name[set], parameter[]]
variable[start_order] assign[=] list[[]]
variable[cycle] assign[=] constant[0]
while compare[call[name[len], parameter[name[tasks]]] greater[>] call[name[len], parameter[name[start_order]]]] begin[:]
<ast.AugAssign object at 0x7da204346bf0>
variable[changed] assign[=] constant[False]
for taget[name[t]] in starred[name[tasks]] begin[:]
if compare[name[t]._name in name[done]] begin[:]
continue
variable[needs] assign[=] constant[0]
for taget[name[req]] in starred[call[name[requires]][name[t]]] begin[:]
if compare[name[req]._name in name[done]] begin[:]
<ast.AugAssign object at 0x7da204345db0>
if compare[name[needs] equal[==] call[name[len], parameter[call[name[requires]][name[t]]]]] begin[:]
variable[changed] assign[=] constant[True]
call[name[start_order].append, parameter[name[t]]]
call[name[done].add, parameter[name[t]._name]]
call[name[log].debug, parameter[constant[Found '%s' in scope], name[t]._name]]
if <ast.UnaryOp object at 0x7da204347b20> begin[:]
call[name[log].error, parameter[constant[Cycle %d failed after %s], name[cycle], <ast.ListComp object at 0x7da204347610>]]
<ast.Raise object at 0x7da204347760>
call[name[log].debug, parameter[constant[Cycle %d gave %s], name[cycle], <ast.ListComp object at 0x7da2043456c0>]]
return[name[start_order]] | keyword[def] identifier[task_list] ( identifier[self] , identifier[pending] = keyword[True] ):
literal[string]
identifier[log] = identifier[self] . identifier[_params] . identifier[get] ( literal[string] , identifier[self] . identifier[_discard] )
identifier[tasks] =[ identifier[t] keyword[for] identifier[t] keyword[in] identifier[self] . identifier[_tasks] keyword[if] identifier[t] . identifier[participant] ()]
identifier[requires] ={}
keyword[for] identifier[t] keyword[in] identifier[tasks] :
identifier[requires] [ identifier[t] ]= identifier[t] . identifier[get_requires] ( identifier[pending] = identifier[pending] )
identifier[done] = identifier[set] ()
identifier[start_order] =[]
identifier[cycle] = literal[int]
keyword[while] identifier[len] ( identifier[tasks] )> identifier[len] ( identifier[start_order] ):
identifier[cycle] += literal[int]
identifier[changed] = keyword[False]
keyword[for] identifier[t] keyword[in] identifier[tasks] :
keyword[if] identifier[t] . identifier[_name] keyword[in] identifier[done] :
keyword[continue]
identifier[needs] = literal[int]
keyword[for] identifier[req] keyword[in] identifier[requires] [ identifier[t] ]:
keyword[if] identifier[req] . identifier[_name] keyword[in] identifier[done] :
identifier[needs] += literal[int]
keyword[if] identifier[needs] == identifier[len] ( identifier[requires] [ identifier[t] ]):
identifier[changed] = keyword[True]
identifier[start_order] . identifier[append] ( identifier[t] )
identifier[done] . identifier[add] ( identifier[t] . identifier[_name] )
identifier[log] . identifier[debug] ( literal[string] , identifier[t] . identifier[_name] )
keyword[if] keyword[not] identifier[changed] :
identifier[log] . identifier[error] ( literal[string] , identifier[cycle] ,[ identifier[t] . identifier[_name] keyword[for] identifier[t] keyword[in] identifier[set] ( identifier[tasks] ). identifier[difference] ( identifier[done] )])
keyword[raise] identifier[TaskError] ( keyword[None] , literal[string] %
( identifier[cycle] , identifier[done] ,[ identifier[t] . identifier[_name] keyword[for] identifier[t] keyword[in] identifier[set] ( identifier[tasks] ). identifier[difference] ( identifier[done] )]))
identifier[log] . identifier[debug] ( literal[string] , identifier[cycle] ,[ identifier[t] . identifier[_name] keyword[for] identifier[t] keyword[in] identifier[start_order] ])
keyword[return] identifier[start_order] | def task_list(self, pending=True):
"""
Return the list of scoped tasks (ie tasks that have
appropriate roles set) in correct execution order.
The result is a list of task objects.
"""
log = self._params.get('log', self._discard)
tasks = [t for t in self._tasks if t.participant()]
requires = {}
for t in tasks:
requires[t] = t.get_requires(pending=pending) # depends on [control=['for'], data=['t']]
done = set()
start_order = []
cycle = 0
while len(tasks) > len(start_order):
cycle += 1
changed = False
for t in tasks:
if t._name in done:
continue # depends on [control=['if'], data=[]]
needs = 0
for req in requires[t]:
if req._name in done:
needs += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['req']]
if needs == len(requires[t]):
changed = True
start_order.append(t)
done.add(t._name)
log.debug("Found '%s' in scope", t._name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['t']]
if not changed:
log.error('Cycle %d failed after %s', cycle, [t._name for t in set(tasks).difference(done)])
raise TaskError(None, 'At cycle %d, startup order conflict, processed %s, remaining %s' % (cycle, done, [t._name for t in set(tasks).difference(done)])) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
log.debug('Cycle %d gave %s', cycle, [t._name for t in start_order])
return start_order |
def data_from_dataset(dataset, X_indexing=None, y_indexing=None):
"""Try to access X and y attribute from dataset.
Also works when dataset is a subset.
Parameters
----------
dataset : skorch.dataset.Dataset or torch.utils.data.Subset
The incoming dataset should be a ``skorch.dataset.Dataset`` or a
``torch.utils.data.Subset`` of a
``skorch.dataset.Dataset``.
X_indexing : function/callable or None (default=None)
If not None, use this function for indexing into the X data. If
None, try to automatically determine how to index data.
y_indexing : function/callable or None (default=None)
If not None, use this function for indexing into the y data. If
None, try to automatically determine how to index data.
"""
X, y = _none, _none
if isinstance(dataset, Subset):
X, y = data_from_dataset(
dataset.dataset, X_indexing=X_indexing, y_indexing=y_indexing)
X = multi_indexing(X, dataset.indices, indexing=X_indexing)
y = multi_indexing(y, dataset.indices, indexing=y_indexing)
elif hasattr(dataset, 'X') and hasattr(dataset, 'y'):
X, y = dataset.X, dataset.y
if (X is _none) or (y is _none):
raise AttributeError("Could not access X and y from dataset.")
return X, y | def function[data_from_dataset, parameter[dataset, X_indexing, y_indexing]]:
constant[Try to access X and y attribute from dataset.
Also works when dataset is a subset.
Parameters
----------
dataset : skorch.dataset.Dataset or torch.utils.data.Subset
The incoming dataset should be a ``skorch.dataset.Dataset`` or a
``torch.utils.data.Subset`` of a
``skorch.dataset.Dataset``.
X_indexing : function/callable or None (default=None)
If not None, use this function for indexing into the X data. If
None, try to automatically determine how to index data.
y_indexing : function/callable or None (default=None)
If not None, use this function for indexing into the y data. If
None, try to automatically determine how to index data.
]
<ast.Tuple object at 0x7da1b0abb1c0> assign[=] tuple[[<ast.Name object at 0x7da1b0aba5f0>, <ast.Name object at 0x7da1b0abb850>]]
if call[name[isinstance], parameter[name[dataset], name[Subset]]] begin[:]
<ast.Tuple object at 0x7da1b0ab91e0> assign[=] call[name[data_from_dataset], parameter[name[dataset].dataset]]
variable[X] assign[=] call[name[multi_indexing], parameter[name[X], name[dataset].indices]]
variable[y] assign[=] call[name[multi_indexing], parameter[name[y], name[dataset].indices]]
if <ast.BoolOp object at 0x7da1b0ab8df0> begin[:]
<ast.Raise object at 0x7da1b0aba920>
return[tuple[[<ast.Name object at 0x7da1b0abb880>, <ast.Name object at 0x7da1b0abada0>]]] | keyword[def] identifier[data_from_dataset] ( identifier[dataset] , identifier[X_indexing] = keyword[None] , identifier[y_indexing] = keyword[None] ):
literal[string]
identifier[X] , identifier[y] = identifier[_none] , identifier[_none]
keyword[if] identifier[isinstance] ( identifier[dataset] , identifier[Subset] ):
identifier[X] , identifier[y] = identifier[data_from_dataset] (
identifier[dataset] . identifier[dataset] , identifier[X_indexing] = identifier[X_indexing] , identifier[y_indexing] = identifier[y_indexing] )
identifier[X] = identifier[multi_indexing] ( identifier[X] , identifier[dataset] . identifier[indices] , identifier[indexing] = identifier[X_indexing] )
identifier[y] = identifier[multi_indexing] ( identifier[y] , identifier[dataset] . identifier[indices] , identifier[indexing] = identifier[y_indexing] )
keyword[elif] identifier[hasattr] ( identifier[dataset] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[dataset] , literal[string] ):
identifier[X] , identifier[y] = identifier[dataset] . identifier[X] , identifier[dataset] . identifier[y]
keyword[if] ( identifier[X] keyword[is] identifier[_none] ) keyword[or] ( identifier[y] keyword[is] identifier[_none] ):
keyword[raise] identifier[AttributeError] ( literal[string] )
keyword[return] identifier[X] , identifier[y] | def data_from_dataset(dataset, X_indexing=None, y_indexing=None):
"""Try to access X and y attribute from dataset.
Also works when dataset is a subset.
Parameters
----------
dataset : skorch.dataset.Dataset or torch.utils.data.Subset
The incoming dataset should be a ``skorch.dataset.Dataset`` or a
``torch.utils.data.Subset`` of a
``skorch.dataset.Dataset``.
X_indexing : function/callable or None (default=None)
If not None, use this function for indexing into the X data. If
None, try to automatically determine how to index data.
y_indexing : function/callable or None (default=None)
If not None, use this function for indexing into the y data. If
None, try to automatically determine how to index data.
"""
(X, y) = (_none, _none)
if isinstance(dataset, Subset):
(X, y) = data_from_dataset(dataset.dataset, X_indexing=X_indexing, y_indexing=y_indexing)
X = multi_indexing(X, dataset.indices, indexing=X_indexing)
y = multi_indexing(y, dataset.indices, indexing=y_indexing) # depends on [control=['if'], data=[]]
elif hasattr(dataset, 'X') and hasattr(dataset, 'y'):
(X, y) = (dataset.X, dataset.y) # depends on [control=['if'], data=[]]
if X is _none or y is _none:
raise AttributeError('Could not access X and y from dataset.') # depends on [control=['if'], data=[]]
return (X, y) |
def ends_with_path_separator(self, file_path):
"""Return True if ``file_path`` ends with a valid path separator."""
if is_int_type(file_path):
return False
file_path = make_string_path(file_path)
return (file_path and
file_path not in (self.path_separator,
self.alternative_path_separator) and
(file_path.endswith(self._path_separator(file_path)) or
self.alternative_path_separator is not None and
file_path.endswith(
self._alternative_path_separator(file_path)))) | def function[ends_with_path_separator, parameter[self, file_path]]:
constant[Return True if ``file_path`` ends with a valid path separator.]
if call[name[is_int_type], parameter[name[file_path]]] begin[:]
return[constant[False]]
variable[file_path] assign[=] call[name[make_string_path], parameter[name[file_path]]]
return[<ast.BoolOp object at 0x7da18dc07310>] | keyword[def] identifier[ends_with_path_separator] ( identifier[self] , identifier[file_path] ):
literal[string]
keyword[if] identifier[is_int_type] ( identifier[file_path] ):
keyword[return] keyword[False]
identifier[file_path] = identifier[make_string_path] ( identifier[file_path] )
keyword[return] ( identifier[file_path] keyword[and]
identifier[file_path] keyword[not] keyword[in] ( identifier[self] . identifier[path_separator] ,
identifier[self] . identifier[alternative_path_separator] ) keyword[and]
( identifier[file_path] . identifier[endswith] ( identifier[self] . identifier[_path_separator] ( identifier[file_path] )) keyword[or]
identifier[self] . identifier[alternative_path_separator] keyword[is] keyword[not] keyword[None] keyword[and]
identifier[file_path] . identifier[endswith] (
identifier[self] . identifier[_alternative_path_separator] ( identifier[file_path] )))) | def ends_with_path_separator(self, file_path):
"""Return True if ``file_path`` ends with a valid path separator."""
if is_int_type(file_path):
return False # depends on [control=['if'], data=[]]
file_path = make_string_path(file_path)
return file_path and file_path not in (self.path_separator, self.alternative_path_separator) and (file_path.endswith(self._path_separator(file_path)) or (self.alternative_path_separator is not None and file_path.endswith(self._alternative_path_separator(file_path)))) |
def make(self, src_templ, evaldict=None, addsource=False, **attrs):
"Make a new function from a given template and update the signature"
src = src_templ % vars(self) # expand name and signature
evaldict = evaldict or {}
mo = DEF.match(src)
if mo is None:
raise SyntaxError('not a valid function template\n%s' % src)
name = mo.group(1) # extract the function name
names = set([name] + [arg.strip(' *') for arg in
self.shortsignature.split(',')])
for n in names:
if n in ('_func_', '_call_'):
raise NameError('%s is overridden in\n%s' % (n, src))
if not src.endswith('\n'): # add a newline just for safety
src += '\n' # this is needed in old versions of Python
try:
code = compile(src, '<string>', 'single')
# print >> sys.stderr, 'Compiling %s' % src
exec(code, evaldict)
except:
print('Error in generated code:', file=sys.stderr)
print(src, file=sys.stderr)
raise
func = evaldict[name]
if addsource:
attrs['__source__'] = src
self.update(func, **attrs)
return func | def function[make, parameter[self, src_templ, evaldict, addsource]]:
constant[Make a new function from a given template and update the signature]
variable[src] assign[=] binary_operation[name[src_templ] <ast.Mod object at 0x7da2590d6920> call[name[vars], parameter[name[self]]]]
variable[evaldict] assign[=] <ast.BoolOp object at 0x7da18dc99060>
variable[mo] assign[=] call[name[DEF].match, parameter[name[src]]]
if compare[name[mo] is constant[None]] begin[:]
<ast.Raise object at 0x7da18dc983a0>
variable[name] assign[=] call[name[mo].group, parameter[constant[1]]]
variable[names] assign[=] call[name[set], parameter[binary_operation[list[[<ast.Name object at 0x7da18dc9b520>]] + <ast.ListComp object at 0x7da18dc9b580>]]]
for taget[name[n]] in starred[name[names]] begin[:]
if compare[name[n] in tuple[[<ast.Constant object at 0x7da18dc9aa10>, <ast.Constant object at 0x7da18dc98310>]]] begin[:]
<ast.Raise object at 0x7da18dc990f0>
if <ast.UnaryOp object at 0x7da18dc9a410> begin[:]
<ast.AugAssign object at 0x7da18dc9a6e0>
<ast.Try object at 0x7da18dc998a0>
variable[func] assign[=] call[name[evaldict]][name[name]]
if name[addsource] begin[:]
call[name[attrs]][constant[__source__]] assign[=] name[src]
call[name[self].update, parameter[name[func]]]
return[name[func]] | keyword[def] identifier[make] ( identifier[self] , identifier[src_templ] , identifier[evaldict] = keyword[None] , identifier[addsource] = keyword[False] ,** identifier[attrs] ):
literal[string]
identifier[src] = identifier[src_templ] % identifier[vars] ( identifier[self] )
identifier[evaldict] = identifier[evaldict] keyword[or] {}
identifier[mo] = identifier[DEF] . identifier[match] ( identifier[src] )
keyword[if] identifier[mo] keyword[is] keyword[None] :
keyword[raise] identifier[SyntaxError] ( literal[string] % identifier[src] )
identifier[name] = identifier[mo] . identifier[group] ( literal[int] )
identifier[names] = identifier[set] ([ identifier[name] ]+[ identifier[arg] . identifier[strip] ( literal[string] ) keyword[for] identifier[arg] keyword[in]
identifier[self] . identifier[shortsignature] . identifier[split] ( literal[string] )])
keyword[for] identifier[n] keyword[in] identifier[names] :
keyword[if] identifier[n] keyword[in] ( literal[string] , literal[string] ):
keyword[raise] identifier[NameError] ( literal[string] %( identifier[n] , identifier[src] ))
keyword[if] keyword[not] identifier[src] . identifier[endswith] ( literal[string] ):
identifier[src] += literal[string]
keyword[try] :
identifier[code] = identifier[compile] ( identifier[src] , literal[string] , literal[string] )
identifier[exec] ( identifier[code] , identifier[evaldict] )
keyword[except] :
identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[print] ( identifier[src] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[raise]
identifier[func] = identifier[evaldict] [ identifier[name] ]
keyword[if] identifier[addsource] :
identifier[attrs] [ literal[string] ]= identifier[src]
identifier[self] . identifier[update] ( identifier[func] ,** identifier[attrs] )
keyword[return] identifier[func] | def make(self, src_templ, evaldict=None, addsource=False, **attrs):
"""Make a new function from a given template and update the signature"""
src = src_templ % vars(self) # expand name and signature
evaldict = evaldict or {}
mo = DEF.match(src)
if mo is None:
raise SyntaxError('not a valid function template\n%s' % src) # depends on [control=['if'], data=[]]
name = mo.group(1) # extract the function name
names = set([name] + [arg.strip(' *') for arg in self.shortsignature.split(',')])
for n in names:
if n in ('_func_', '_call_'):
raise NameError('%s is overridden in\n%s' % (n, src)) # depends on [control=['if'], data=['n']] # depends on [control=['for'], data=['n']]
if not src.endswith('\n'): # add a newline just for safety
src += '\n' # this is needed in old versions of Python # depends on [control=['if'], data=[]]
try:
code = compile(src, '<string>', 'single')
# print >> sys.stderr, 'Compiling %s' % src
exec(code, evaldict) # depends on [control=['try'], data=[]]
except:
print('Error in generated code:', file=sys.stderr)
print(src, file=sys.stderr)
raise # depends on [control=['except'], data=[]]
func = evaldict[name]
if addsource:
attrs['__source__'] = src # depends on [control=['if'], data=[]]
self.update(func, **attrs)
return func |
def column_to_bq_schema(self):
"""Convert a column to a bigquery schema object.
"""
kwargs = {}
if len(self.fields) > 0:
fields = [field.column_to_bq_schema() for field in self.fields]
kwargs = {"fields": fields}
return google.cloud.bigquery.SchemaField(self.name, self.dtype,
self.mode, **kwargs) | def function[column_to_bq_schema, parameter[self]]:
constant[Convert a column to a bigquery schema object.
]
variable[kwargs] assign[=] dictionary[[], []]
if compare[call[name[len], parameter[name[self].fields]] greater[>] constant[0]] begin[:]
variable[fields] assign[=] <ast.ListComp object at 0x7da1b1a564d0>
variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b1a576d0>], [<ast.Name object at 0x7da1b1a55d80>]]
return[call[name[google].cloud.bigquery.SchemaField, parameter[name[self].name, name[self].dtype, name[self].mode]]] | keyword[def] identifier[column_to_bq_schema] ( identifier[self] ):
literal[string]
identifier[kwargs] ={}
keyword[if] identifier[len] ( identifier[self] . identifier[fields] )> literal[int] :
identifier[fields] =[ identifier[field] . identifier[column_to_bq_schema] () keyword[for] identifier[field] keyword[in] identifier[self] . identifier[fields] ]
identifier[kwargs] ={ literal[string] : identifier[fields] }
keyword[return] identifier[google] . identifier[cloud] . identifier[bigquery] . identifier[SchemaField] ( identifier[self] . identifier[name] , identifier[self] . identifier[dtype] ,
identifier[self] . identifier[mode] ,** identifier[kwargs] ) | def column_to_bq_schema(self):
"""Convert a column to a bigquery schema object.
"""
kwargs = {}
if len(self.fields) > 0:
fields = [field.column_to_bq_schema() for field in self.fields]
kwargs = {'fields': fields} # depends on [control=['if'], data=[]]
return google.cloud.bigquery.SchemaField(self.name, self.dtype, self.mode, **kwargs) |
def get_translation(self, context_id, translation_id):
"""Retrieves a translation entry for the given id values.
:param int context_id: The id-value representing the context instance.
:param int translation_id: The id-value representing the translation
instance.
:return dict: Mapping of properties for the translation entry.
:raise SoftLayerAPIError: If a translation cannot be found.
"""
translation = next((x for x in self.get_translations(context_id)
if x['id'] == translation_id), None)
if translation is None:
raise SoftLayerAPIError('SoftLayer_Exception_ObjectNotFound',
'Unable to find object with id of \'{}\''
.format(translation_id))
return translation | def function[get_translation, parameter[self, context_id, translation_id]]:
constant[Retrieves a translation entry for the given id values.
:param int context_id: The id-value representing the context instance.
:param int translation_id: The id-value representing the translation
instance.
:return dict: Mapping of properties for the translation entry.
:raise SoftLayerAPIError: If a translation cannot be found.
]
variable[translation] assign[=] call[name[next], parameter[<ast.GeneratorExp object at 0x7da18f722410>, constant[None]]]
if compare[name[translation] is constant[None]] begin[:]
<ast.Raise object at 0x7da18fe91930>
return[name[translation]] | keyword[def] identifier[get_translation] ( identifier[self] , identifier[context_id] , identifier[translation_id] ):
literal[string]
identifier[translation] = identifier[next] (( identifier[x] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[get_translations] ( identifier[context_id] )
keyword[if] identifier[x] [ literal[string] ]== identifier[translation_id] ), keyword[None] )
keyword[if] identifier[translation] keyword[is] keyword[None] :
keyword[raise] identifier[SoftLayerAPIError] ( literal[string] ,
literal[string]
. identifier[format] ( identifier[translation_id] ))
keyword[return] identifier[translation] | def get_translation(self, context_id, translation_id):
"""Retrieves a translation entry for the given id values.
:param int context_id: The id-value representing the context instance.
:param int translation_id: The id-value representing the translation
instance.
:return dict: Mapping of properties for the translation entry.
:raise SoftLayerAPIError: If a translation cannot be found.
"""
translation = next((x for x in self.get_translations(context_id) if x['id'] == translation_id), None)
if translation is None:
raise SoftLayerAPIError('SoftLayer_Exception_ObjectNotFound', "Unable to find object with id of '{}'".format(translation_id)) # depends on [control=['if'], data=[]]
return translation |
def check_topics(client, req_topics):
"""Check for existence of provided topics in Kafka."""
client.update_cluster()
logger.debug('Found topics: %r', client.topics.keys())
for req_topic in req_topics:
if req_topic not in client.topics.keys():
err_topic_not_found = 'Topic not found: {}'.format(req_topic)
logger.warning(err_topic_not_found)
raise TopicNotFound(err_topic_not_found)
topic = client.topics[req_topic]
if not topic.partitions:
err_topic_no_part = 'Topic has no partitions: {}'.format(req_topic)
logger.warning(err_topic_no_part)
raise TopicNoPartition(err_topic_no_part)
logger.info('Topic is ready: %s', req_topic) | def function[check_topics, parameter[client, req_topics]]:
constant[Check for existence of provided topics in Kafka.]
call[name[client].update_cluster, parameter[]]
call[name[logger].debug, parameter[constant[Found topics: %r], call[name[client].topics.keys, parameter[]]]]
for taget[name[req_topic]] in starred[name[req_topics]] begin[:]
if compare[name[req_topic] <ast.NotIn object at 0x7da2590d7190> call[name[client].topics.keys, parameter[]]] begin[:]
variable[err_topic_not_found] assign[=] call[constant[Topic not found: {}].format, parameter[name[req_topic]]]
call[name[logger].warning, parameter[name[err_topic_not_found]]]
<ast.Raise object at 0x7da1b1800b50>
variable[topic] assign[=] call[name[client].topics][name[req_topic]]
if <ast.UnaryOp object at 0x7da1b18001c0> begin[:]
variable[err_topic_no_part] assign[=] call[constant[Topic has no partitions: {}].format, parameter[name[req_topic]]]
call[name[logger].warning, parameter[name[err_topic_no_part]]]
<ast.Raise object at 0x7da1b1800b80>
call[name[logger].info, parameter[constant[Topic is ready: %s], name[req_topic]]] | keyword[def] identifier[check_topics] ( identifier[client] , identifier[req_topics] ):
literal[string]
identifier[client] . identifier[update_cluster] ()
identifier[logger] . identifier[debug] ( literal[string] , identifier[client] . identifier[topics] . identifier[keys] ())
keyword[for] identifier[req_topic] keyword[in] identifier[req_topics] :
keyword[if] identifier[req_topic] keyword[not] keyword[in] identifier[client] . identifier[topics] . identifier[keys] ():
identifier[err_topic_not_found] = literal[string] . identifier[format] ( identifier[req_topic] )
identifier[logger] . identifier[warning] ( identifier[err_topic_not_found] )
keyword[raise] identifier[TopicNotFound] ( identifier[err_topic_not_found] )
identifier[topic] = identifier[client] . identifier[topics] [ identifier[req_topic] ]
keyword[if] keyword[not] identifier[topic] . identifier[partitions] :
identifier[err_topic_no_part] = literal[string] . identifier[format] ( identifier[req_topic] )
identifier[logger] . identifier[warning] ( identifier[err_topic_no_part] )
keyword[raise] identifier[TopicNoPartition] ( identifier[err_topic_no_part] )
identifier[logger] . identifier[info] ( literal[string] , identifier[req_topic] ) | def check_topics(client, req_topics):
"""Check for existence of provided topics in Kafka."""
client.update_cluster()
logger.debug('Found topics: %r', client.topics.keys())
for req_topic in req_topics:
if req_topic not in client.topics.keys():
err_topic_not_found = 'Topic not found: {}'.format(req_topic)
logger.warning(err_topic_not_found)
raise TopicNotFound(err_topic_not_found) # depends on [control=['if'], data=['req_topic']]
topic = client.topics[req_topic]
if not topic.partitions:
err_topic_no_part = 'Topic has no partitions: {}'.format(req_topic)
logger.warning(err_topic_no_part)
raise TopicNoPartition(err_topic_no_part) # depends on [control=['if'], data=[]]
logger.info('Topic is ready: %s', req_topic) # depends on [control=['for'], data=['req_topic']] |
def translate(term=None, phrase=None, api_key=GIPHY_PUBLIC_KEY, strict=False,
rating=None):
"""
Shorthand for creating a Giphy api wrapper with the given api key
and then calling the translate method.
"""
return Giphy(api_key=api_key, strict=strict).translate(
term=term, phrase=phrase, rating=rating) | def function[translate, parameter[term, phrase, api_key, strict, rating]]:
constant[
Shorthand for creating a Giphy api wrapper with the given api key
and then calling the translate method.
]
return[call[call[name[Giphy], parameter[]].translate, parameter[]]] | keyword[def] identifier[translate] ( identifier[term] = keyword[None] , identifier[phrase] = keyword[None] , identifier[api_key] = identifier[GIPHY_PUBLIC_KEY] , identifier[strict] = keyword[False] ,
identifier[rating] = keyword[None] ):
literal[string]
keyword[return] identifier[Giphy] ( identifier[api_key] = identifier[api_key] , identifier[strict] = identifier[strict] ). identifier[translate] (
identifier[term] = identifier[term] , identifier[phrase] = identifier[phrase] , identifier[rating] = identifier[rating] ) | def translate(term=None, phrase=None, api_key=GIPHY_PUBLIC_KEY, strict=False, rating=None):
"""
Shorthand for creating a Giphy api wrapper with the given api key
and then calling the translate method.
"""
return Giphy(api_key=api_key, strict=strict).translate(term=term, phrase=phrase, rating=rating) |
def has_path(self, path):
'''Check if a path exists below this node.
@param path A list of path elements pointing to a node in the tree.
For example, ['/', 'localhost', 'dir.host']. The first
element in this path should be this node's name.
@return True if the path points to a node in the tree below this node,
or this node itself (for paths one element long). False
otherwise.
Example:
>>> c1 = TreeNode(name='c1')
>>> c2 = TreeNode(name='c2')
>>> p = TreeNode(name='p', children={'c1':c1, 'c2':c2})
>>> c1._parent = p
>>> c2._parent = p
>>> p.has_path(['p', 'c1'])
True
>>> p.has_path(['p', 'c3'])
False
'''
with self._mutex:
if path[0] == self._name:
if len(path) == 1:
return True
elif path[1] in self._children:
return self._children[path[1]].has_path(path[1:])
else:
return False
else:
return False | def function[has_path, parameter[self, path]]:
constant[Check if a path exists below this node.
@param path A list of path elements pointing to a node in the tree.
For example, ['/', 'localhost', 'dir.host']. The first
element in this path should be this node's name.
@return True if the path points to a node in the tree below this node,
or this node itself (for paths one element long). False
otherwise.
Example:
>>> c1 = TreeNode(name='c1')
>>> c2 = TreeNode(name='c2')
>>> p = TreeNode(name='p', children={'c1':c1, 'c2':c2})
>>> c1._parent = p
>>> c2._parent = p
>>> p.has_path(['p', 'c1'])
True
>>> p.has_path(['p', 'c3'])
False
]
with name[self]._mutex begin[:]
if compare[call[name[path]][constant[0]] equal[==] name[self]._name] begin[:]
if compare[call[name[len], parameter[name[path]]] equal[==] constant[1]] begin[:]
return[constant[True]] | keyword[def] identifier[has_path] ( identifier[self] , identifier[path] ):
literal[string]
keyword[with] identifier[self] . identifier[_mutex] :
keyword[if] identifier[path] [ literal[int] ]== identifier[self] . identifier[_name] :
keyword[if] identifier[len] ( identifier[path] )== literal[int] :
keyword[return] keyword[True]
keyword[elif] identifier[path] [ literal[int] ] keyword[in] identifier[self] . identifier[_children] :
keyword[return] identifier[self] . identifier[_children] [ identifier[path] [ literal[int] ]]. identifier[has_path] ( identifier[path] [ literal[int] :])
keyword[else] :
keyword[return] keyword[False]
keyword[else] :
keyword[return] keyword[False] | def has_path(self, path):
"""Check if a path exists below this node.
@param path A list of path elements pointing to a node in the tree.
For example, ['/', 'localhost', 'dir.host']. The first
element in this path should be this node's name.
@return True if the path points to a node in the tree below this node,
or this node itself (for paths one element long). False
otherwise.
Example:
>>> c1 = TreeNode(name='c1')
>>> c2 = TreeNode(name='c2')
>>> p = TreeNode(name='p', children={'c1':c1, 'c2':c2})
>>> c1._parent = p
>>> c2._parent = p
>>> p.has_path(['p', 'c1'])
True
>>> p.has_path(['p', 'c3'])
False
"""
with self._mutex:
if path[0] == self._name:
if len(path) == 1:
return True # depends on [control=['if'], data=[]]
elif path[1] in self._children:
return self._children[path[1]].has_path(path[1:]) # depends on [control=['if'], data=[]]
else:
return False # depends on [control=['if'], data=[]]
else:
return False # depends on [control=['with'], data=[]] |
def bank_account_query(self, number, date, account_type, bank_id):
    """Run an authenticated bank account statement request.

    Builds the statement request payload via ``_bareq`` and submits it
    through ``authenticated_query``.
    """
    request = self._bareq(number, date, account_type, bank_id)
    return self.authenticated_query(request)
constant[Bank account statement request]
return[call[name[self].authenticated_query, parameter[call[name[self]._bareq, parameter[name[number], name[date], name[account_type], name[bank_id]]]]]] | keyword[def] identifier[bank_account_query] ( identifier[self] , identifier[number] , identifier[date] , identifier[account_type] , identifier[bank_id] ):
literal[string]
keyword[return] identifier[self] . identifier[authenticated_query] (
identifier[self] . identifier[_bareq] ( identifier[number] , identifier[date] , identifier[account_type] , identifier[bank_id] )
) | def bank_account_query(self, number, date, account_type, bank_id):
"""Bank account statement request"""
return self.authenticated_query(self._bareq(number, date, account_type, bank_id)) |
def pad_bin_image_to_shape(image, shape):
    """
    Pad a binary image with zeros on the right and bottom until it has
    the requested ``shape``; the image content stays in the top-left corner.

    Raises AssertionError if the image is larger than ``shape`` in either
    dimension. When the image already matches ``shape`` exactly, the
    original array object is returned unchanged.
    """
    target_h, target_w = shape
    img_h, img_w = image.shape
    assert img_h <= target_h
    assert img_w <= target_w
    result = image
    # Grow to the right first, then downward, stacking zero-filled strips.
    if img_w < target_w:
        right_strip = numpy.zeros((img_h, target_w - img_w), bool)
        result = numpy.hstack((result, right_strip))
    if img_h < target_h:
        bottom_strip = numpy.zeros((target_h - img_h, target_w), bool)
        result = numpy.vstack((result, bottom_strip))
    return result
constant[
Padd image to size :shape: with zeros
]
<ast.Tuple object at 0x7da1b2775f90> assign[=] name[shape]
<ast.Tuple object at 0x7da1b2774ac0> assign[=] name[image].shape
assert[compare[name[ih] less_or_equal[<=] name[h]]]
assert[compare[name[iw] less_or_equal[<=] name[w]]]
if compare[name[iw] less[<] name[w]] begin[:]
variable[result] assign[=] call[name[numpy].hstack, parameter[tuple[[<ast.Name object at 0x7da1b2776c50>, <ast.Call object at 0x7da1b2776b60>]]]]
if compare[name[ih] less[<] name[h]] begin[:]
variable[result] assign[=] call[name[numpy].vstack, parameter[tuple[[<ast.Name object at 0x7da1b277d690>, <ast.Call object at 0x7da1b277fd30>]]]]
return[name[result]] | keyword[def] identifier[pad_bin_image_to_shape] ( identifier[image] , identifier[shape] ):
literal[string]
identifier[h] , identifier[w] = identifier[shape]
identifier[ih] , identifier[iw] = identifier[image] . identifier[shape]
keyword[assert] identifier[ih] <= identifier[h]
keyword[assert] identifier[iw] <= identifier[w]
keyword[if] identifier[iw] < identifier[w] :
identifier[result] = identifier[numpy] . identifier[hstack] (( identifier[image] , identifier[numpy] . identifier[zeros] (( identifier[ih] , identifier[w] - identifier[iw] ), identifier[bool] )))
keyword[else] :
identifier[result] = identifier[image]
keyword[if] identifier[ih] < identifier[h] :
identifier[result] = identifier[numpy] . identifier[vstack] (( identifier[result] , identifier[numpy] . identifier[zeros] (( identifier[h] - identifier[ih] , identifier[w] ), identifier[bool] )))
keyword[return] identifier[result] | def pad_bin_image_to_shape(image, shape):
"""
Padd image to size :shape: with zeros
"""
(h, w) = shape
(ih, iw) = image.shape
assert ih <= h
assert iw <= w
if iw < w:
result = numpy.hstack((image, numpy.zeros((ih, w - iw), bool))) # depends on [control=['if'], data=['iw', 'w']]
else:
result = image
if ih < h:
result = numpy.vstack((result, numpy.zeros((h - ih, w), bool))) # depends on [control=['if'], data=['ih', 'h']]
return result |
def home_two_point_field_goal_percentage(self):
    """
    Returns a ``float`` of the number of two point field goals made divided
    by the number of two point field goal attempts by the home team.
    Percentage ranges from 0-1, rounded to 3 decimal places. Returns
    ``None`` when the home team has no two point field goal attempts
    instead of raising ``ZeroDivisionError``.
    """
    attempts = float(self.home_two_point_field_goal_attempts)
    # Guard against partial/empty box scores with zero attempts, which
    # previously raised ZeroDivisionError.
    if not attempts:
        return None
    result = float(self.home_two_point_field_goals) / attempts
    return round(float(result), 3)
constant[
Returns a ``float`` of the number of two point field goals made divided
by the number of two point field goal attempts by the home team.
Percentage ranges from 0-1.
]
variable[result] assign[=] binary_operation[call[name[float], parameter[name[self].home_two_point_field_goals]] / call[name[float], parameter[name[self].home_two_point_field_goal_attempts]]]
return[call[name[round], parameter[call[name[float], parameter[name[result]]], constant[3]]]] | keyword[def] identifier[home_two_point_field_goal_percentage] ( identifier[self] ):
literal[string]
identifier[result] = identifier[float] ( identifier[self] . identifier[home_two_point_field_goals] )/ identifier[float] ( identifier[self] . identifier[home_two_point_field_goal_attempts] )
keyword[return] identifier[round] ( identifier[float] ( identifier[result] ), literal[int] ) | def home_two_point_field_goal_percentage(self):
"""
Returns a ``float`` of the number of two point field goals made divided
by the number of two point field goal attempts by the home team.
Percentage ranges from 0-1.
"""
result = float(self.home_two_point_field_goals) / float(self.home_two_point_field_goal_attempts)
return round(float(result), 3) |
def remove_node(self, p_id, remove_unconnected_nodes=True):
    """Remove a node from the graph.

    Detaches the node from every incoming edge, deletes it, and — when
    ``remove_unconnected_nodes`` is True — recursively removes any of its
    former targets that become isolated as a result. Unknown node ids are
    silently ignored.
    """
    if not self.has_node(p_id):
        return
    # Detach the node from every edge pointing at it.
    for source in self.incoming_neighbors(p_id):
        self._edges[source].remove(p_id)
    # Snapshot the outgoing targets before the node disappears.
    orphan_candidates = self.outgoing_neighbors(p_id) if remove_unconnected_nodes else set()
    del self._edges[p_id]
    for candidate in orphan_candidates:
        if self.is_isolated(candidate):
            self.remove_node(candidate)
constant[ Removes a node from the graph. ]
if call[name[self].has_node, parameter[name[p_id]]] begin[:]
for taget[name[neighbor]] in starred[call[name[self].incoming_neighbors, parameter[name[p_id]]]] begin[:]
call[call[name[self]._edges][name[neighbor]].remove, parameter[name[p_id]]]
variable[neighbors] assign[=] call[name[set], parameter[]]
if name[remove_unconnected_nodes] begin[:]
variable[neighbors] assign[=] call[name[self].outgoing_neighbors, parameter[name[p_id]]]
<ast.Delete object at 0x7da20c6c5c30>
for taget[name[neighbor]] in starred[name[neighbors]] begin[:]
if call[name[self].is_isolated, parameter[name[neighbor]]] begin[:]
call[name[self].remove_node, parameter[name[neighbor]]] | keyword[def] identifier[remove_node] ( identifier[self] , identifier[p_id] , identifier[remove_unconnected_nodes] = keyword[True] ):
literal[string]
keyword[if] identifier[self] . identifier[has_node] ( identifier[p_id] ):
keyword[for] identifier[neighbor] keyword[in] identifier[self] . identifier[incoming_neighbors] ( identifier[p_id] ):
identifier[self] . identifier[_edges] [ identifier[neighbor] ]. identifier[remove] ( identifier[p_id] )
identifier[neighbors] = identifier[set] ()
keyword[if] identifier[remove_unconnected_nodes] :
identifier[neighbors] = identifier[self] . identifier[outgoing_neighbors] ( identifier[p_id] )
keyword[del] identifier[self] . identifier[_edges] [ identifier[p_id] ]
keyword[for] identifier[neighbor] keyword[in] identifier[neighbors] :
keyword[if] identifier[self] . identifier[is_isolated] ( identifier[neighbor] ):
identifier[self] . identifier[remove_node] ( identifier[neighbor] ) | def remove_node(self, p_id, remove_unconnected_nodes=True):
""" Removes a node from the graph. """
if self.has_node(p_id):
for neighbor in self.incoming_neighbors(p_id):
self._edges[neighbor].remove(p_id) # depends on [control=['for'], data=['neighbor']]
neighbors = set()
if remove_unconnected_nodes:
neighbors = self.outgoing_neighbors(p_id) # depends on [control=['if'], data=[]]
del self._edges[p_id]
for neighbor in neighbors:
if self.is_isolated(neighbor):
self.remove_node(neighbor) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['neighbor']] # depends on [control=['if'], data=[]] |
def add_boundary_pores(self, labels=['top', 'bottom', 'front', 'back',
                                     'left', 'right'], offset=None):
    r"""
    Add boundary pores to the specified faces of the network
    Pores are offset from the faces of the domain.
    Parameters
    ----------
    labels : string or list of strings
        The labels indicating the pores defining each face where boundary
        pores are to be added (e.g. 'left' or ['left', 'right'])
    offset : scalar or array_like
        The spacing of the network (e.g. [1, 1, 1]). This must be given
        since it can be quite difficult to infer from the network,
        for instance if boundary pores have already added to other faces.
    """
    # Broadcast a scalar offset to one entry per (x, y, z) axis.
    offset = sp.array(offset)
    if offset.size == 1:
        offset = sp.ones(3)*offset
    for item in labels:
        Ps = self.pores(item)
        coords = sp.absolute(self['pore.coords'][Ps])
        # An axis along which the face pores show no coordinate variation
        # is normal to that face; the offset is applied only along it.
        axis = sp.count_nonzero(sp.diff(coords, axis=0), axis=0) == 0
        ax_off = sp.array(axis, dtype=int)*offset
        # If this face sits at the minimum coordinate of the domain, the
        # boundary pores must be offset in the negative direction.
        # NOTE(review): comparison uses absolute coordinates — presumably
        # to handle networks spanning negative space; confirm for domains
        # that straddle the origin.
        if sp.amin(coords) == sp.amin(coords[:, sp.where(axis)[0]]):
            ax_off = -1*ax_off
        topotools.add_boundary_pores(network=self, pores=Ps, offset=ax_off,
                                     apply_label=item + '_boundary')
constant[
Add boundary pores to the specified faces of the network
Pores are offset from the faces of the domain.
Parameters
----------
labels : string or list of strings
The labels indicating the pores defining each face where boundary
pores are to be added (e.g. 'left' or ['left', 'right'])
offset : scalar or array_like
The spacing of the network (e.g. [1, 1, 1]). This must be given
since it can be quite difficult to infer from the network,
for instance if boundary pores have already added to other faces.
]
variable[offset] assign[=] call[name[sp].array, parameter[name[offset]]]
if compare[name[offset].size equal[==] constant[1]] begin[:]
variable[offset] assign[=] binary_operation[call[name[sp].ones, parameter[constant[3]]] * name[offset]]
for taget[name[item]] in starred[name[labels]] begin[:]
variable[Ps] assign[=] call[name[self].pores, parameter[name[item]]]
variable[coords] assign[=] call[name[sp].absolute, parameter[call[call[name[self]][constant[pore.coords]]][name[Ps]]]]
variable[axis] assign[=] compare[call[name[sp].count_nonzero, parameter[call[name[sp].diff, parameter[name[coords]]]]] equal[==] constant[0]]
variable[ax_off] assign[=] binary_operation[call[name[sp].array, parameter[name[axis]]] * name[offset]]
if compare[call[name[sp].amin, parameter[name[coords]]] equal[==] call[name[sp].amin, parameter[call[name[coords]][tuple[[<ast.Slice object at 0x7da18c4cce50>, <ast.Subscript object at 0x7da18c4cc880>]]]]]] begin[:]
variable[ax_off] assign[=] binary_operation[<ast.UnaryOp object at 0x7da18c4cdfc0> * name[ax_off]]
call[name[topotools].add_boundary_pores, parameter[]] | keyword[def] identifier[add_boundary_pores] ( identifier[self] , identifier[labels] =[ literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ], identifier[offset] = keyword[None] ):
literal[string]
identifier[offset] = identifier[sp] . identifier[array] ( identifier[offset] )
keyword[if] identifier[offset] . identifier[size] == literal[int] :
identifier[offset] = identifier[sp] . identifier[ones] ( literal[int] )* identifier[offset]
keyword[for] identifier[item] keyword[in] identifier[labels] :
identifier[Ps] = identifier[self] . identifier[pores] ( identifier[item] )
identifier[coords] = identifier[sp] . identifier[absolute] ( identifier[self] [ literal[string] ][ identifier[Ps] ])
identifier[axis] = identifier[sp] . identifier[count_nonzero] ( identifier[sp] . identifier[diff] ( identifier[coords] , identifier[axis] = literal[int] ), identifier[axis] = literal[int] )== literal[int]
identifier[ax_off] = identifier[sp] . identifier[array] ( identifier[axis] , identifier[dtype] = identifier[int] )* identifier[offset]
keyword[if] identifier[sp] . identifier[amin] ( identifier[coords] )== identifier[sp] . identifier[amin] ( identifier[coords] [:, identifier[sp] . identifier[where] ( identifier[axis] )[ literal[int] ]]):
identifier[ax_off] =- literal[int] * identifier[ax_off]
identifier[topotools] . identifier[add_boundary_pores] ( identifier[network] = identifier[self] , identifier[pores] = identifier[Ps] , identifier[offset] = identifier[ax_off] ,
identifier[apply_label] = identifier[item] + literal[string] ) | def add_boundary_pores(self, labels=['top', 'bottom', 'front', 'back', 'left', 'right'], offset=None):
"""
Add boundary pores to the specified faces of the network
Pores are offset from the faces of the domain.
Parameters
----------
labels : string or list of strings
The labels indicating the pores defining each face where boundary
pores are to be added (e.g. 'left' or ['left', 'right'])
offset : scalar or array_like
The spacing of the network (e.g. [1, 1, 1]). This must be given
since it can be quite difficult to infer from the network,
for instance if boundary pores have already added to other faces.
"""
offset = sp.array(offset)
if offset.size == 1:
offset = sp.ones(3) * offset # depends on [control=['if'], data=[]]
for item in labels:
Ps = self.pores(item)
coords = sp.absolute(self['pore.coords'][Ps])
axis = sp.count_nonzero(sp.diff(coords, axis=0), axis=0) == 0
ax_off = sp.array(axis, dtype=int) * offset
if sp.amin(coords) == sp.amin(coords[:, sp.where(axis)[0]]):
ax_off = -1 * ax_off # depends on [control=['if'], data=[]]
topotools.add_boundary_pores(network=self, pores=Ps, offset=ax_off, apply_label=item + '_boundary') # depends on [control=['for'], data=['item']] |
def _callable_from_gvcf(data, vrn_file, out_dir):
    """Retrieve callable regions based on ref call regions in gVCF.
    Uses https://github.com/lijiayong/gvcf_regions

    Returns the path to a BED file of callable regions, or ``None``
    (implicitly) when the configured variant caller has no gVCF support.
    """
    # Map variant caller names to the --gvcf_type values understood by
    # the gvcf_regions.py script.
    methods = {"freebayes": "freebayes", "platypus": "platypus",
               "gatk-haplotype": "gatk"}
    gvcf_type = methods.get(dd.get_variantcaller(data))
    if gvcf_type:
        out_file = os.path.join(out_dir, "%s-gcvf-coverage.bed" %
                                utils.splitext_plus(os.path.basename(vrn_file))[0])
        # Skip regeneration when the BED file is newer than the gVCF.
        if not utils.file_uptodate(out_file, vrn_file):
            with file_transaction(data, out_file) as tx_out_file:
                # NOTE: the command template is filled from locals(), so it
                # depends on the local variable names gvcf_type, vrn_file
                # and tx_out_file staying exactly as they are.
                cmd = ("gvcf_regions.py --gvcf_type {gvcf_type} {vrn_file} "
                       "| bedtools merge > {tx_out_file}")
                do.run(cmd.format(**locals()), "Convert gVCF to BED file of callable regions")
        return out_file
constant[Retrieve callable regions based on ref call regions in gVCF.
Uses https://github.com/lijiayong/gvcf_regions
]
variable[methods] assign[=] dictionary[[<ast.Constant object at 0x7da1b19d8f40>, <ast.Constant object at 0x7da1b17a6320>, <ast.Constant object at 0x7da1b17a7e80>], [<ast.Constant object at 0x7da1b17a5840>, <ast.Constant object at 0x7da1b17a43a0>, <ast.Constant object at 0x7da1b17a70d0>]]
variable[gvcf_type] assign[=] call[name[methods].get, parameter[call[name[dd].get_variantcaller, parameter[name[data]]]]]
if name[gvcf_type] begin[:]
variable[out_file] assign[=] call[name[os].path.join, parameter[name[out_dir], binary_operation[constant[%s-gcvf-coverage.bed] <ast.Mod object at 0x7da2590d6920> call[call[name[utils].splitext_plus, parameter[call[name[os].path.basename, parameter[name[vrn_file]]]]]][constant[0]]]]]
if <ast.UnaryOp object at 0x7da1b17a6920> begin[:]
with call[name[file_transaction], parameter[name[data], name[out_file]]] begin[:]
variable[cmd] assign[=] constant[gvcf_regions.py --gvcf_type {gvcf_type} {vrn_file} | bedtools merge > {tx_out_file}]
call[name[do].run, parameter[call[name[cmd].format, parameter[]], constant[Convert gVCF to BED file of callable regions]]]
return[name[out_file]] | keyword[def] identifier[_callable_from_gvcf] ( identifier[data] , identifier[vrn_file] , identifier[out_dir] ):
literal[string]
identifier[methods] ={ literal[string] : literal[string] , literal[string] : literal[string] ,
literal[string] : literal[string] }
identifier[gvcf_type] = identifier[methods] . identifier[get] ( identifier[dd] . identifier[get_variantcaller] ( identifier[data] ))
keyword[if] identifier[gvcf_type] :
identifier[out_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[out_dir] , literal[string] %
identifier[utils] . identifier[splitext_plus] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[vrn_file] ))[ literal[int] ])
keyword[if] keyword[not] identifier[utils] . identifier[file_uptodate] ( identifier[out_file] , identifier[vrn_file] ):
keyword[with] identifier[file_transaction] ( identifier[data] , identifier[out_file] ) keyword[as] identifier[tx_out_file] :
identifier[cmd] =( literal[string]
literal[string] )
identifier[do] . identifier[run] ( identifier[cmd] . identifier[format] (** identifier[locals] ()), literal[string] )
keyword[return] identifier[out_file] | def _callable_from_gvcf(data, vrn_file, out_dir):
"""Retrieve callable regions based on ref call regions in gVCF.
Uses https://github.com/lijiayong/gvcf_regions
"""
methods = {'freebayes': 'freebayes', 'platypus': 'platypus', 'gatk-haplotype': 'gatk'}
gvcf_type = methods.get(dd.get_variantcaller(data))
if gvcf_type:
out_file = os.path.join(out_dir, '%s-gcvf-coverage.bed' % utils.splitext_plus(os.path.basename(vrn_file))[0])
if not utils.file_uptodate(out_file, vrn_file):
with file_transaction(data, out_file) as tx_out_file:
cmd = 'gvcf_regions.py --gvcf_type {gvcf_type} {vrn_file} | bedtools merge > {tx_out_file}'
do.run(cmd.format(**locals()), 'Convert gVCF to BED file of callable regions') # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
return out_file # depends on [control=['if'], data=[]] |
def page(self, status=values.unset, source_sid=values.unset,
         grouping_sid=values.unset, date_created_after=values.unset,
         date_created_before=values.unset, media_type=values.unset,
         page_token=values.unset, page_number=values.unset,
         page_size=values.unset):
    """
    Retrieve a single page of RecordingInstance records from the API.
    Request is executed immediately
    :param RecordingInstance.Status status: Only show Recordings with the given status.
    :param unicode source_sid: Only show the Recordings with the given source Sid.
    :param unicode grouping_sid: Only show Recordings that have this GroupingSid.
    :param datetime date_created_after: Only show Recordings that started on or after this ISO8601 date-time with timezone.
    :param datetime date_created_before: Only show Recordings that started before this ISO8601 date-time with timezone.
    :param RecordingInstance.Type media_type: Only show Recordings that have this media type.
    :param str page_token: PageToken provided by the API
    :param int page_number: Page Number, this value is simply for client state
    :param int page_size: Number of records to return, defaults to 50
    :returns: Page of RecordingInstance
    :rtype: twilio.rest.video.v1.recording.RecordingPage
    """
    # Serialize filters into query-string form; values.of drops any
    # entries left at the unset sentinel.
    query = {}
    query['Status'] = status
    query['SourceSid'] = source_sid
    query['GroupingSid'] = serialize.map(grouping_sid, lambda e: e)
    query['DateCreatedAfter'] = serialize.iso8601_datetime(date_created_after)
    query['DateCreatedBefore'] = serialize.iso8601_datetime(date_created_before)
    query['MediaType'] = media_type
    query['PageToken'] = page_token
    query['Page'] = page_number
    query['PageSize'] = page_size
    response = self._version.page('GET', self._uri, params=values.of(query))
    return RecordingPage(self._version, response, self._solution)
constant[
Retrieve a single page of RecordingInstance records from the API.
Request is executed immediately
:param RecordingInstance.Status status: Only show Recordings with the given status.
:param unicode source_sid: Only show the Recordings with the given source Sid.
:param unicode grouping_sid: Only show Recordings that have this GroupingSid.
:param datetime date_created_after: Only show Recordings that started on or after this ISO8601 date-time with timezone.
:param datetime date_created_before: Only show Recordings that started before this ISO8601 date-time with timezone.
:param RecordingInstance.Type media_type: Only show Recordings that have this media type.
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of RecordingInstance
:rtype: twilio.rest.video.v1.recording.RecordingPage
]
variable[params] assign[=] call[name[values].of, parameter[dictionary[[<ast.Constant object at 0x7da20c6aa140>, <ast.Constant object at 0x7da20c6a8610>, <ast.Constant object at 0x7da20c6a8700>, <ast.Constant object at 0x7da20c6a86d0>, <ast.Constant object at 0x7da20c6a81f0>, <ast.Constant object at 0x7da20c6aaf80>, <ast.Constant object at 0x7da20c6a8520>, <ast.Constant object at 0x7da20c6a9f30>, <ast.Constant object at 0x7da20c6a8a30>], [<ast.Name object at 0x7da20c6a9c00>, <ast.Name object at 0x7da20c6a9ba0>, <ast.Call object at 0x7da20c6a9f90>, <ast.Call object at 0x7da20c6aa080>, <ast.Call object at 0x7da20c6a9270>, <ast.Name object at 0x7da20c6ab430>, <ast.Name object at 0x7da20c6aa9e0>, <ast.Name object at 0x7da20c6aa4a0>, <ast.Name object at 0x7da20c6abaf0>]]]]
variable[response] assign[=] call[name[self]._version.page, parameter[constant[GET], name[self]._uri]]
return[call[name[RecordingPage], parameter[name[self]._version, name[response], name[self]._solution]]] | keyword[def] identifier[page] ( identifier[self] , identifier[status] = identifier[values] . identifier[unset] , identifier[source_sid] = identifier[values] . identifier[unset] ,
identifier[grouping_sid] = identifier[values] . identifier[unset] , identifier[date_created_after] = identifier[values] . identifier[unset] ,
identifier[date_created_before] = identifier[values] . identifier[unset] , identifier[media_type] = identifier[values] . identifier[unset] ,
identifier[page_token] = identifier[values] . identifier[unset] , identifier[page_number] = identifier[values] . identifier[unset] ,
identifier[page_size] = identifier[values] . identifier[unset] ):
literal[string]
identifier[params] = identifier[values] . identifier[of] ({
literal[string] : identifier[status] ,
literal[string] : identifier[source_sid] ,
literal[string] : identifier[serialize] . identifier[map] ( identifier[grouping_sid] , keyword[lambda] identifier[e] : identifier[e] ),
literal[string] : identifier[serialize] . identifier[iso8601_datetime] ( identifier[date_created_after] ),
literal[string] : identifier[serialize] . identifier[iso8601_datetime] ( identifier[date_created_before] ),
literal[string] : identifier[media_type] ,
literal[string] : identifier[page_token] ,
literal[string] : identifier[page_number] ,
literal[string] : identifier[page_size] ,
})
identifier[response] = identifier[self] . identifier[_version] . identifier[page] (
literal[string] ,
identifier[self] . identifier[_uri] ,
identifier[params] = identifier[params] ,
)
keyword[return] identifier[RecordingPage] ( identifier[self] . identifier[_version] , identifier[response] , identifier[self] . identifier[_solution] ) | def page(self, status=values.unset, source_sid=values.unset, grouping_sid=values.unset, date_created_after=values.unset, date_created_before=values.unset, media_type=values.unset, page_token=values.unset, page_number=values.unset, page_size=values.unset):
"""
Retrieve a single page of RecordingInstance records from the API.
Request is executed immediately
:param RecordingInstance.Status status: Only show Recordings with the given status.
:param unicode source_sid: Only show the Recordings with the given source Sid.
:param unicode grouping_sid: Only show Recordings that have this GroupingSid.
:param datetime date_created_after: Only show Recordings that started on or after this ISO8601 date-time with timezone.
:param datetime date_created_before: Only show Recordings that started before this ISO8601 date-time with timezone.
:param RecordingInstance.Type media_type: Only show Recordings that have this media type.
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of RecordingInstance
:rtype: twilio.rest.video.v1.recording.RecordingPage
"""
params = values.of({'Status': status, 'SourceSid': source_sid, 'GroupingSid': serialize.map(grouping_sid, lambda e: e), 'DateCreatedAfter': serialize.iso8601_datetime(date_created_after), 'DateCreatedBefore': serialize.iso8601_datetime(date_created_before), 'MediaType': media_type, 'PageToken': page_token, 'Page': page_number, 'PageSize': page_size})
response = self._version.page('GET', self._uri, params=params)
return RecordingPage(self._version, response, self._solution) |
def interactive_client_choice(self):
    """
    Present a menu for user to select from ESP/MSP managed clients they have permission to.
    **Returns:** Tuple with (Boolean success, selected client ID).
    """
    clients = self._parent_class.get.clients_t()
    clients_perms = self._parent_class.get.permissions_clients_d(self._parent_class._user_id)

    # Both API calls must succeed before a menu can be built.
    if not (clients.cgx_status and clients_perms.cgx_status):
        print("ESP/MSP detail retrieval failed.")
        return False, {}

    # Map client ID -> canonical name for display purposes.
    client_id_name = {
        client.get('id', "err"): client.get('canonical_name')
        for client in clients.cgx_content.get('items', [])
        if isinstance(client, dict)
    }

    # Build (display name, client id) menu entries for each permitted
    # client, falling back to the raw ID when no name is known.
    menu_list = []
    for client in clients_perms.cgx_content.get('items', []):
        if isinstance(client, dict):
            client_id = client.get('client_id')
            menu_list.append((client_id_name.get(client_id, client_id), client_id))

    if not menu_list:
        # User has no ESP/MSP clients they are permitted to manage.
        print("No ESP/MSP clients allowed for user.")
        return False, {}

    # Ask the user to pick a client; only the chosen ID is needed.
    _, chosen_client_id = self.quick_menu("ESP/MSP Detected. Select a client to use:", "{0}) {1}", menu_list)
    return True, chosen_client_id
constant[
Present a menu for user to select from ESP/MSP managed clients they have permission to.
**Returns:** Tuple with (Boolean success, selected client ID).
]
variable[clients] assign[=] call[name[self]._parent_class.get.clients_t, parameter[]]
variable[clients_perms] assign[=] call[name[self]._parent_class.get.permissions_clients_d, parameter[name[self]._parent_class._user_id]]
variable[client_status] assign[=] name[clients].cgx_status
variable[clients_dict] assign[=] name[clients].cgx_content
variable[c_perms_status] assign[=] name[clients_perms].cgx_status
variable[c_perms_dict] assign[=] name[clients_perms].cgx_content
if <ast.BoolOp object at 0x7da1b0e7e9e0> begin[:]
variable[client_id_name] assign[=] dictionary[[], []]
for taget[name[client]] in starred[call[name[clients_dict].get, parameter[constant[items], list[[]]]]] begin[:]
if compare[call[name[type], parameter[name[client]]] is name[dict]] begin[:]
call[name[client_id_name]][call[name[client].get, parameter[constant[id], constant[err]]]] assign[=] call[name[client].get, parameter[constant[canonical_name]]]
variable[menu_list] assign[=] list[[]]
for taget[name[client]] in starred[call[name[c_perms_dict].get, parameter[constant[items], list[[]]]]] begin[:]
if compare[call[name[type], parameter[name[client]]] is name[dict]] begin[:]
variable[client_id] assign[=] call[name[client].get, parameter[constant[client_id]]]
call[name[menu_list].append, parameter[tuple[[<ast.Call object at 0x7da1b0f90640>, <ast.Name object at 0x7da1b0f91b40>]]]]
if <ast.UnaryOp object at 0x7da1b0f92140> begin[:]
call[name[print], parameter[constant[No ESP/MSP clients allowed for user.]]]
return[tuple[[<ast.Constant object at 0x7da1b0f90370>, <ast.Dict object at 0x7da1b0f91600>]]]
<ast.Tuple object at 0x7da1b0f90c40> assign[=] call[name[self].quick_menu, parameter[constant[ESP/MSP Detected. Select a client to use:], constant[{0}) {1}], name[menu_list]]]
return[tuple[[<ast.Constant object at 0x7da18dc98fd0>, <ast.Name object at 0x7da18dc9b790>]]] | keyword[def] identifier[interactive_client_choice] ( identifier[self] ):
literal[string]
identifier[clients] = identifier[self] . identifier[_parent_class] . identifier[get] . identifier[clients_t] ()
identifier[clients_perms] = identifier[self] . identifier[_parent_class] . identifier[get] . identifier[permissions_clients_d] ( identifier[self] . identifier[_parent_class] . identifier[_user_id] )
identifier[client_status] = identifier[clients] . identifier[cgx_status]
identifier[clients_dict] = identifier[clients] . identifier[cgx_content]
identifier[c_perms_status] = identifier[clients_perms] . identifier[cgx_status]
identifier[c_perms_dict] = identifier[clients_perms] . identifier[cgx_content]
keyword[if] identifier[client_status] keyword[and] identifier[c_perms_status] :
identifier[client_id_name] ={}
keyword[for] identifier[client] keyword[in] identifier[clients_dict] . identifier[get] ( literal[string] ,[]):
keyword[if] identifier[type] ( identifier[client] ) keyword[is] identifier[dict] :
identifier[client_id_name] [ identifier[client] . identifier[get] ( literal[string] , literal[string] )]= identifier[client] . identifier[get] ( literal[string] )
identifier[menu_list] =[]
keyword[for] identifier[client] keyword[in] identifier[c_perms_dict] . identifier[get] ( literal[string] ,[]):
keyword[if] identifier[type] ( identifier[client] ) keyword[is] identifier[dict] :
identifier[client_id] = identifier[client] . identifier[get] ( literal[string] )
identifier[menu_list] . identifier[append] (
( identifier[client_id_name] . identifier[get] ( identifier[client_id] , identifier[client_id] ), identifier[client_id] )
)
keyword[if] keyword[not] identifier[menu_list] :
identifier[print] ( literal[string] )
keyword[return] keyword[False] ,{}
identifier[_] , identifier[chosen_client_id] = identifier[self] . identifier[quick_menu] ( literal[string] , literal[string] , identifier[menu_list] )
keyword[return] keyword[True] , identifier[chosen_client_id]
keyword[else] :
identifier[print] ( literal[string] )
keyword[return] keyword[False] ,{} | def interactive_client_choice(self):
"""
Present a menu for user to select from ESP/MSP managed clients they have permission to.
**Returns:** Tuple with (Boolean success, selected client ID).
"""
clients = self._parent_class.get.clients_t()
clients_perms = self._parent_class.get.permissions_clients_d(self._parent_class._user_id)
client_status = clients.cgx_status
clients_dict = clients.cgx_content
c_perms_status = clients_perms.cgx_status
c_perms_dict = clients_perms.cgx_content
# Build MSP/ESP id-name dict, get list of allowed tenants.
if client_status and c_perms_status:
client_id_name = {}
for client in clients_dict.get('items', []):
if type(client) is dict:
# create client ID to name map table.
client_id_name[client.get('id', 'err')] = client.get('canonical_name') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['client']]
# Valid clients w/permissions - create list of tuples for menu
menu_list = []
for client in c_perms_dict.get('items', []):
if type(client) is dict:
# add entry
client_id = client.get('client_id')
# create tuple of ( client name, client id ) to append to list
menu_list.append((client_id_name.get(client_id, client_id), client_id)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['client']]
# empty menu?
if not menu_list:
# no clients
print('No ESP/MSP clients allowed for user.')
return (False, {}) # depends on [control=['if'], data=[]]
# ask user to select client
(_, chosen_client_id) = self.quick_menu('ESP/MSP Detected. Select a client to use:', '{0}) {1}', menu_list)
return (True, chosen_client_id) # depends on [control=['if'], data=[]]
else:
print('ESP/MSP detail retrieval failed.')
return (False, {}) |
def to_igraph(self, attribute="weight", **kwargs):
    """Convert this graph to an igraph Graph.

    The igraph object is built with the
    ``igraph.Graph.Weighted_Adjacency`` constructor from the graph's
    weighted adjacency matrix (``self.W`` when available, otherwise
    ``self.K``) with its diagonal zeroed out.

    Parameters
    ----------
    attribute : str, optional (default: "weight")
        Name of the igraph edge attribute that receives the weights.
    kwargs : additional arguments for igraph.Graph.Weighted_Adjacency

    Raises
    ------
    ImportError
        If the optional ``python-igraph`` package is not installed.
    """
    try:
        import igraph as ig
    except ImportError:
        raise ImportError("Please install igraph with "
                          "`pip install --user python-igraph`.")
    try:
        adjacency = self.W
    except AttributeError:
        # not a pygsp graph: fall back to self.K
        adjacency = self.K.copy()
    adjacency = utils.set_diagonal(adjacency, 0)
    dense_rows = utils.to_dense(adjacency).tolist()
    return ig.Graph.Weighted_Adjacency(dense_rows, attr=attribute, **kwargs)
constant[Convert to an igraph Graph
Uses the igraph.Graph.Weighted_Adjacency constructor
Parameters
----------
attribute : str, optional (default: "weight")
kwargs : additional arguments for igraph.Graph.Weighted_Adjacency
]
<ast.Try object at 0x7da1b0c8ae90>
<ast.Try object at 0x7da1b0c88a60>
return[call[name[ig].Graph.Weighted_Adjacency, parameter[call[call[name[utils].to_dense, parameter[name[W]]].tolist, parameter[]]]]] | keyword[def] identifier[to_igraph] ( identifier[self] , identifier[attribute] = literal[string] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
keyword[import] identifier[igraph] keyword[as] identifier[ig]
keyword[except] identifier[ImportError] :
keyword[raise] identifier[ImportError] ( literal[string]
literal[string] )
keyword[try] :
identifier[W] = identifier[self] . identifier[W]
keyword[except] identifier[AttributeError] :
identifier[W] = identifier[self] . identifier[K] . identifier[copy] ()
identifier[W] = identifier[utils] . identifier[set_diagonal] ( identifier[W] , literal[int] )
keyword[return] identifier[ig] . identifier[Graph] . identifier[Weighted_Adjacency] ( identifier[utils] . identifier[to_dense] ( identifier[W] ). identifier[tolist] (),
identifier[attr] = identifier[attribute] ,** identifier[kwargs] ) | def to_igraph(self, attribute='weight', **kwargs):
"""Convert to an igraph Graph
Uses the igraph.Graph.Weighted_Adjacency constructor
Parameters
----------
attribute : str, optional (default: "weight")
kwargs : additional arguments for igraph.Graph.Weighted_Adjacency
"""
try:
import igraph as ig # depends on [control=['try'], data=[]]
except ImportError:
raise ImportError('Please install igraph with `pip install --user python-igraph`.') # depends on [control=['except'], data=[]]
try:
W = self.W # depends on [control=['try'], data=[]]
except AttributeError:
# not a pygsp graph
W = self.K.copy()
W = utils.set_diagonal(W, 0) # depends on [control=['except'], data=[]]
return ig.Graph.Weighted_Adjacency(utils.to_dense(W).tolist(), attr=attribute, **kwargs) |
def get_edges_as_list(self):
    """
    Return all the edge objects as a list.
    @rtype: list
    @return: list of L{Cedge} objects, one per underlying edge node
    """
    return [Cedge(edge_node) for edge_node in self.__get_edge_nodes()]
constant[
Iterator that returns all the edge objects
@rtype: L{Cedge}
@return: terminal objects (iterator)
]
variable[my_edges] assign[=] list[[]]
for taget[name[edge_node]] in starred[call[name[self].__get_edge_nodes, parameter[]]] begin[:]
call[name[my_edges].append, parameter[call[name[Cedge], parameter[name[edge_node]]]]]
return[name[my_edges]] | keyword[def] identifier[get_edges_as_list] ( identifier[self] ):
literal[string]
identifier[my_edges] =[]
keyword[for] identifier[edge_node] keyword[in] identifier[self] . identifier[__get_edge_nodes] ():
identifier[my_edges] . identifier[append] ( identifier[Cedge] ( identifier[edge_node] ))
keyword[return] identifier[my_edges] | def get_edges_as_list(self):
"""
Iterator that returns all the edge objects
@rtype: L{Cedge}
@return: terminal objects (iterator)
"""
my_edges = []
for edge_node in self.__get_edge_nodes():
my_edges.append(Cedge(edge_node)) # depends on [control=['for'], data=['edge_node']]
return my_edges |
def proposals(ctx, account):
    """ List proposals

    Renders every proposal of *account* as one row of a left-aligned
    table and echoes the table to the console.
    """
    table = PrettyTable(
        [
            "id",
            "expiration",
            "proposer",
            "required approvals",
            "available approvals",
            "review period time",
            "proposal",
        ]
    )
    table.align = "l"
    for proposal in Proposals(account):
        # Resolve the proposer account name when one is recorded.
        if proposal.proposer:
            proposer_name = Account(
                proposal.proposer, peerplays_instance=ctx.peerplays
            )["name"]
        else:
            proposer_name = "n/a"
        required_names = [
            Account(x)["name"]
            for x in (
                proposal["required_active_approvals"]
                + proposal["required_owner_approvals"]
            )
        ]
        available_approvals = json.dumps(
            [Account(x)["name"] for x in proposal["available_active_approvals"]]
            + proposal["available_key_approvals"]
            + proposal["available_owner_approvals"],
            indent=1,
        )
        table.add_row(
            [
                proposal["id"],
                proposal["expiration_time"],
                proposer_name,
                required_names,
                available_approvals,
                proposal.get("review_period_time", None),
                json.dumps(proposal["proposed_transaction"], indent=4),
            ]
        )
    click.echo(str(table))
constant[ List proposals
]
variable[proposals] assign[=] call[name[Proposals], parameter[name[account]]]
variable[t] assign[=] call[name[PrettyTable], parameter[list[[<ast.Constant object at 0x7da1b1041b40>, <ast.Constant object at 0x7da1b10404f0>, <ast.Constant object at 0x7da1b1042e30>, <ast.Constant object at 0x7da1b1041ea0>, <ast.Constant object at 0x7da1b1043cd0>, <ast.Constant object at 0x7da1b1041900>, <ast.Constant object at 0x7da1b10403d0>]]]]
name[t].align assign[=] constant[l]
for taget[name[proposal]] in starred[name[proposals]] begin[:]
if name[proposal].proposer begin[:]
variable[proposer] assign[=] call[call[name[Account], parameter[name[proposal].proposer]]][constant[name]]
call[name[t].add_row, parameter[list[[<ast.Subscript object at 0x7da1b1041b70>, <ast.Subscript object at 0x7da1b10419f0>, <ast.Name object at 0x7da1b1042920>, <ast.ListComp object at 0x7da1b103b8e0>, <ast.Call object at 0x7da1b1038df0>, <ast.Call object at 0x7da1b103b460>, <ast.Call object at 0x7da1b103bd30>]]]]
call[name[click].echo, parameter[call[name[str], parameter[name[t]]]]] | keyword[def] identifier[proposals] ( identifier[ctx] , identifier[account] ):
literal[string]
identifier[proposals] = identifier[Proposals] ( identifier[account] )
identifier[t] = identifier[PrettyTable] (
[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
]
)
identifier[t] . identifier[align] = literal[string]
keyword[for] identifier[proposal] keyword[in] identifier[proposals] :
keyword[if] identifier[proposal] . identifier[proposer] :
identifier[proposer] = identifier[Account] ( identifier[proposal] . identifier[proposer] , identifier[peerplays_instance] = identifier[ctx] . identifier[peerplays] )[
literal[string]
]
keyword[else] :
identifier[proposer] = literal[string]
identifier[t] . identifier[add_row] (
[
identifier[proposal] [ literal[string] ],
identifier[proposal] [ literal[string] ],
identifier[proposer] ,
[
identifier[Account] ( identifier[x] )[ literal[string] ]
keyword[for] identifier[x] keyword[in] (
identifier[proposal] [ literal[string] ]
+ identifier[proposal] [ literal[string] ]
)
],
identifier[json] . identifier[dumps] (
[ identifier[Account] ( identifier[x] )[ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[proposal] [ literal[string] ]]
+ identifier[proposal] [ literal[string] ]
+ identifier[proposal] [ literal[string] ],
identifier[indent] = literal[int] ,
),
identifier[proposal] . identifier[get] ( literal[string] , keyword[None] ),
identifier[json] . identifier[dumps] ( identifier[proposal] [ literal[string] ], identifier[indent] = literal[int] ),
]
)
identifier[click] . identifier[echo] ( identifier[str] ( identifier[t] )) | def proposals(ctx, account):
""" List proposals
"""
proposals = Proposals(account)
t = PrettyTable(['id', 'expiration', 'proposer', 'required approvals', 'available approvals', 'review period time', 'proposal'])
t.align = 'l'
for proposal in proposals:
if proposal.proposer:
proposer = Account(proposal.proposer, peerplays_instance=ctx.peerplays)['name'] # depends on [control=['if'], data=[]]
else:
proposer = 'n/a'
t.add_row([proposal['id'], proposal['expiration_time'], proposer, [Account(x)['name'] for x in proposal['required_active_approvals'] + proposal['required_owner_approvals']], json.dumps([Account(x)['name'] for x in proposal['available_active_approvals']] + proposal['available_key_approvals'] + proposal['available_owner_approvals'], indent=1), proposal.get('review_period_time', None), json.dumps(proposal['proposed_transaction'], indent=4)]) # depends on [control=['for'], data=['proposal']]
click.echo(str(t)) |
def find_patches(modules, recursive=True):
"""Find all the patches created through decorators.
Parameters
----------
modules : list of module
Modules and/or packages to search the patches in.
recursive : bool
``True`` to search recursively in subpackages.
Returns
-------
list of gorilla.Patch
Patches found.
Raises
------
TypeError
The input is not a valid package or module.
See Also
--------
:func:`patch`, :func:`patches`.
"""
out = []
modules = (module
for package in modules
for module in _module_iterator(package, recursive=recursive))
for module in modules:
members = _get_members(module, filter=None)
for _, value in members:
base = _get_base(value)
decorator_data = get_decorator_data(base)
if decorator_data is None:
continue
out.extend(decorator_data.patches)
return out | def function[find_patches, parameter[modules, recursive]]:
constant[Find all the patches created through decorators.
Parameters
----------
modules : list of module
Modules and/or packages to search the patches in.
recursive : bool
``True`` to search recursively in subpackages.
Returns
-------
list of gorilla.Patch
Patches found.
Raises
------
TypeError
The input is not a valid package or module.
See Also
--------
:func:`patch`, :func:`patches`.
]
variable[out] assign[=] list[[]]
variable[modules] assign[=] <ast.GeneratorExp object at 0x7da1b1191540>
for taget[name[module]] in starred[name[modules]] begin[:]
variable[members] assign[=] call[name[_get_members], parameter[name[module]]]
for taget[tuple[[<ast.Name object at 0x7da1b11918d0>, <ast.Name object at 0x7da1b1193430>]]] in starred[name[members]] begin[:]
variable[base] assign[=] call[name[_get_base], parameter[name[value]]]
variable[decorator_data] assign[=] call[name[get_decorator_data], parameter[name[base]]]
if compare[name[decorator_data] is constant[None]] begin[:]
continue
call[name[out].extend, parameter[name[decorator_data].patches]]
return[name[out]] | keyword[def] identifier[find_patches] ( identifier[modules] , identifier[recursive] = keyword[True] ):
literal[string]
identifier[out] =[]
identifier[modules] =( identifier[module]
keyword[for] identifier[package] keyword[in] identifier[modules]
keyword[for] identifier[module] keyword[in] identifier[_module_iterator] ( identifier[package] , identifier[recursive] = identifier[recursive] ))
keyword[for] identifier[module] keyword[in] identifier[modules] :
identifier[members] = identifier[_get_members] ( identifier[module] , identifier[filter] = keyword[None] )
keyword[for] identifier[_] , identifier[value] keyword[in] identifier[members] :
identifier[base] = identifier[_get_base] ( identifier[value] )
identifier[decorator_data] = identifier[get_decorator_data] ( identifier[base] )
keyword[if] identifier[decorator_data] keyword[is] keyword[None] :
keyword[continue]
identifier[out] . identifier[extend] ( identifier[decorator_data] . identifier[patches] )
keyword[return] identifier[out] | def find_patches(modules, recursive=True):
"""Find all the patches created through decorators.
Parameters
----------
modules : list of module
Modules and/or packages to search the patches in.
recursive : bool
``True`` to search recursively in subpackages.
Returns
-------
list of gorilla.Patch
Patches found.
Raises
------
TypeError
The input is not a valid package or module.
See Also
--------
:func:`patch`, :func:`patches`.
"""
out = []
modules = (module for package in modules for module in _module_iterator(package, recursive=recursive))
for module in modules:
members = _get_members(module, filter=None)
for (_, value) in members:
base = _get_base(value)
decorator_data = get_decorator_data(base)
if decorator_data is None:
continue # depends on [control=['if'], data=[]]
out.extend(decorator_data.patches) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['module']]
return out |
def load(self, f):
    """Load a PNG image from the readable binary stream *f*.

    Supported subset: 8-bit truecolor PNGs (with or without alpha),
    compression method 0, filter method 0, non-interlaced. Decoded
    pixels are written as RGBA into ``self.canvas``, and ``self.width``
    and ``self.height`` are updated.

    :param f: readable binary file-like object positioned at the start
        of the PNG stream.
    :raises ValueError: if the stream uses an unsupported PNG variant.
    """
    SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
    SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
                          COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
    # Read the 8-byte signature outside of `assert`: asserts are stripped
    # under `python -O`, and the read side effect must not disappear or
    # the stream position would be corrupted.
    signature = f.read(8)
    assert signature == SIGNATURE
    chunks = iter(self.chunks(f))
    header = next(chunks)
    assert header[0] == b'IHDR'
    (width, height, bit_depth, color_type, compression,
     filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
    if bit_depth != 8:
        raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
    if compression != 0:
        raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
    if filter_type != 0:
        raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
    if interlace != 0:
        raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
    if color_type not in SUPPORTED_COLOR_TYPES:
        # Bug fix: the original format string had two placeholders but was
        # given only one argument, so raising this error crashed with
        # IndexError instead of the intended ValueError.
        raise ValueError('Unsupported PNG format (color_type={}; must be one of {})'.format(
            color_type, SUPPORTED_COLOR_TYPES))
    self.width = width
    self.height = height
    self.canvas = bytearray(self.bgcolor * width * height)
    bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
    bytes_per_row = bytes_per_pixel * width
    bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
    bytes_per_scanline = bytes_per_row + 1  # +1 for the per-row filter byte
    # Python 2 requires the encode for struct.unpack
    scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
    reader = ByteReader(chunks)
    old_row = None
    cursor = 0
    for row in range(height):
        scanline = reader.read(bytes_per_scanline)
        unpacked = list(struct.unpack(scanline_fmt, scanline))
        # unpacked[0] is this scanline's filter type; the rest is the
        # filtered pixel data.
        old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
        # Expand RGB rows to RGBA when the source carries no alpha channel.
        rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
        self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
        cursor += bytes_per_rgba_row
constant[Load a PNG image]
variable[SUPPORTED_COLOR_TYPES] assign[=] tuple[[<ast.Name object at 0x7da1b0aa6f50>, <ast.Name object at 0x7da1b0aa6b30>]]
variable[SAMPLES_PER_PIXEL] assign[=] dictionary[[<ast.Name object at 0x7da1b0aa57b0>, <ast.Name object at 0x7da1b0aa5870>], [<ast.Constant object at 0x7da1b0aa5030>, <ast.Constant object at 0x7da1b0aa69e0>]]
assert[compare[call[name[f].read, parameter[constant[8]]] equal[==] name[SIGNATURE]]]
variable[chunks] assign[=] call[name[iter], parameter[call[name[self].chunks, parameter[name[f]]]]]
variable[header] assign[=] call[name[next], parameter[name[chunks]]]
assert[compare[call[name[header]][constant[0]] equal[==] constant[b'IHDR']]]
<ast.Tuple object at 0x7da1b092ea70> assign[=] call[name[struct].unpack, parameter[constant[b'!2I5B'], call[name[header]][constant[1]]]]
if compare[name[bit_depth] not_equal[!=] constant[8]] begin[:]
<ast.Raise object at 0x7da1b092e020>
if compare[name[compression] not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da1b092c370>
if compare[name[filter_type] not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da1b092db70>
if compare[name[interlace] not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da1b092ef50>
if compare[name[color_type] <ast.NotIn object at 0x7da2590d7190> name[SUPPORTED_COLOR_TYPES]] begin[:]
<ast.Raise object at 0x7da1b092e740>
name[self].width assign[=] name[width]
name[self].height assign[=] name[height]
name[self].canvas assign[=] call[name[bytearray], parameter[binary_operation[binary_operation[name[self].bgcolor * name[width]] * name[height]]]]
variable[bytes_per_pixel] assign[=] call[name[SAMPLES_PER_PIXEL]][name[color_type]]
variable[bytes_per_row] assign[=] binary_operation[name[bytes_per_pixel] * name[width]]
variable[bytes_per_rgba_row] assign[=] binary_operation[call[name[SAMPLES_PER_PIXEL]][name[COLOR_TYPE_TRUECOLOR_WITH_ALPHA]] * name[width]]
variable[bytes_per_scanline] assign[=] binary_operation[name[bytes_per_row] + constant[1]]
variable[scanline_fmt] assign[=] call[binary_operation[constant[!%dB] <ast.Mod object at 0x7da2590d6920> name[bytes_per_scanline]].encode, parameter[constant[ascii]]]
variable[reader] assign[=] call[name[ByteReader], parameter[name[chunks]]]
variable[old_row] assign[=] constant[None]
variable[cursor] assign[=] constant[0]
for taget[name[row]] in starred[call[name[range], parameter[name[height]]]] begin[:]
variable[scanline] assign[=] call[name[reader].read, parameter[name[bytes_per_scanline]]]
variable[unpacked] assign[=] call[name[list], parameter[call[name[struct].unpack, parameter[name[scanline_fmt], name[scanline]]]]]
variable[old_row] assign[=] call[name[self].defilter, parameter[call[name[unpacked]][<ast.Slice object at 0x7da1b0ac9e10>], name[old_row], call[name[unpacked]][constant[0]]]]
variable[rgba_row] assign[=] <ast.IfExp object at 0x7da1b0aca620>
call[name[self].canvas][<ast.Slice object at 0x7da1b0ac86a0>] assign[=] name[rgba_row]
<ast.AugAssign object at 0x7da1b0ac9ab0> | keyword[def] identifier[load] ( identifier[self] , identifier[f] ):
literal[string]
identifier[SUPPORTED_COLOR_TYPES] =( identifier[COLOR_TYPE_TRUECOLOR] , identifier[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] )
identifier[SAMPLES_PER_PIXEL] ={ identifier[COLOR_TYPE_TRUECOLOR] : literal[int] ,
identifier[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] : literal[int] }
keyword[assert] identifier[f] . identifier[read] ( literal[int] )== identifier[SIGNATURE]
identifier[chunks] = identifier[iter] ( identifier[self] . identifier[chunks] ( identifier[f] ))
identifier[header] = identifier[next] ( identifier[chunks] )
keyword[assert] identifier[header] [ literal[int] ]== literal[string]
( identifier[width] , identifier[height] , identifier[bit_depth] , identifier[color_type] , identifier[compression] ,
identifier[filter_type] , identifier[interlace] )= identifier[struct] . identifier[unpack] ( literal[string] , identifier[header] [ literal[int] ])
keyword[if] identifier[bit_depth] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[bit_depth] ))
keyword[if] identifier[compression] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[compression] ))
keyword[if] identifier[filter_type] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[filter_type] ))
keyword[if] identifier[interlace] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[interlace] ))
keyword[if] identifier[color_type] keyword[not] keyword[in] identifier[SUPPORTED_COLOR_TYPES] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[SUPPORTED_COLOR_TYPES] ))
identifier[self] . identifier[width] = identifier[width]
identifier[self] . identifier[height] = identifier[height]
identifier[self] . identifier[canvas] = identifier[bytearray] ( identifier[self] . identifier[bgcolor] * identifier[width] * identifier[height] )
identifier[bytes_per_pixel] = identifier[SAMPLES_PER_PIXEL] [ identifier[color_type] ]
identifier[bytes_per_row] = identifier[bytes_per_pixel] * identifier[width]
identifier[bytes_per_rgba_row] = identifier[SAMPLES_PER_PIXEL] [ identifier[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] ]* identifier[width]
identifier[bytes_per_scanline] = identifier[bytes_per_row] + literal[int]
identifier[scanline_fmt] =( literal[string] % identifier[bytes_per_scanline] ). identifier[encode] ( literal[string] )
identifier[reader] = identifier[ByteReader] ( identifier[chunks] )
identifier[old_row] = keyword[None]
identifier[cursor] = literal[int]
keyword[for] identifier[row] keyword[in] identifier[range] ( identifier[height] ):
identifier[scanline] = identifier[reader] . identifier[read] ( identifier[bytes_per_scanline] )
identifier[unpacked] = identifier[list] ( identifier[struct] . identifier[unpack] ( identifier[scanline_fmt] , identifier[scanline] ))
identifier[old_row] = identifier[self] . identifier[defilter] ( identifier[unpacked] [ literal[int] :], identifier[old_row] , identifier[unpacked] [ literal[int] ], identifier[bpp] = identifier[bytes_per_pixel] )
identifier[rgba_row] = identifier[old_row] keyword[if] identifier[color_type] == identifier[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] keyword[else] identifier[rgb2rgba] ( identifier[old_row] )
identifier[self] . identifier[canvas] [ identifier[cursor] : identifier[cursor] + identifier[bytes_per_rgba_row] ]= identifier[rgba_row]
identifier[cursor] += identifier[bytes_per_rgba_row] | def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = {COLOR_TYPE_TRUECOLOR: 3, COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4}
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression, filter_type, interlace) = struct.unpack(b'!2I5B', header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth)) # depends on [control=['if'], data=['bit_depth']]
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression)) # depends on [control=['if'], data=['compression']]
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type)) # depends on [control=['if'], data=['filter_type']]
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace)) # depends on [control=['if'], data=['interlace']]
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES)) # depends on [control=['if'], data=['SUPPORTED_COLOR_TYPES']]
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row # depends on [control=['for'], data=[]] |
def get_all(cls, account=None, location=None, include_disabled=False):
    """Return all resources for a given account, location and resource type.

    Attributes:
        account (:obj:`Account`): Account owning the resources
        location (`str`): Location of the resources to return (region)
        include_disabled (`bool`): Include resources from disabled accounts (default: False)

    Returns:
        `dict` mapping resource_id to the corresponding resource object
    """
    type_id = ResourceType.get(cls.resource_type).resource_type_id
    query = db.Resource.filter(Resource.resource_type_id == type_id)
    if account:
        query = query.filter(Resource.account_id == account.account_id)
    if not include_disabled:
        # Restrict results to resources owned by enabled accounts.
        query = query.join(
            Account, Resource.account_id == Account.account_id
        ).filter(Account.enabled == 1)
    if location:
        query = query.filter(Resource.location == location)
    return {resource.resource_id: cls(resource) for resource in query.all()}
constant[Returns a list of all resources for a given account, location and resource type.
Attributes:
account (:obj:`Account`): Account owning the resources
location (`str`): Location of the resources to return (region)
include_disabled (`bool`): Include resources from disabled accounts (default: False)
Returns:
list of resource objects
]
variable[qry] assign[=] call[name[db].Resource.filter, parameter[compare[name[Resource].resource_type_id equal[==] call[name[ResourceType].get, parameter[name[cls].resource_type]].resource_type_id]]]
if name[account] begin[:]
variable[qry] assign[=] call[name[qry].filter, parameter[compare[name[Resource].account_id equal[==] name[account].account_id]]]
if <ast.UnaryOp object at 0x7da1b204af50> begin[:]
variable[qry] assign[=] call[call[name[qry].join, parameter[name[Account], compare[name[Resource].account_id equal[==] name[Account].account_id]]].filter, parameter[compare[name[Account].enabled equal[==] constant[1]]]]
if name[location] begin[:]
variable[qry] assign[=] call[name[qry].filter, parameter[compare[name[Resource].location equal[==] name[location]]]]
return[<ast.DictComp object at 0x7da1b204beb0>] | keyword[def] identifier[get_all] ( identifier[cls] , identifier[account] = keyword[None] , identifier[location] = keyword[None] , identifier[include_disabled] = keyword[False] ):
literal[string]
identifier[qry] = identifier[db] . identifier[Resource] . identifier[filter] (
identifier[Resource] . identifier[resource_type_id] == identifier[ResourceType] . identifier[get] ( identifier[cls] . identifier[resource_type] ). identifier[resource_type_id]
)
keyword[if] identifier[account] :
identifier[qry] = identifier[qry] . identifier[filter] ( identifier[Resource] . identifier[account_id] == identifier[account] . identifier[account_id] )
keyword[if] keyword[not] identifier[include_disabled] :
identifier[qry] = identifier[qry] . identifier[join] ( identifier[Account] , identifier[Resource] . identifier[account_id] == identifier[Account] . identifier[account_id] ). identifier[filter] ( identifier[Account] . identifier[enabled] == literal[int] )
keyword[if] identifier[location] :
identifier[qry] = identifier[qry] . identifier[filter] ( identifier[Resource] . identifier[location] == identifier[location] )
keyword[return] { identifier[res] . identifier[resource_id] : identifier[cls] ( identifier[res] ) keyword[for] identifier[res] keyword[in] identifier[qry] . identifier[all] ()} | def get_all(cls, account=None, location=None, include_disabled=False):
"""Returns a list of all resources for a given account, location and resource type.
Attributes:
account (:obj:`Account`): Account owning the resources
location (`str`): Location of the resources to return (region)
include_disabled (`bool`): Include resources from disabled accounts (default: False)
Returns:
list of resource objects
"""
qry = db.Resource.filter(Resource.resource_type_id == ResourceType.get(cls.resource_type).resource_type_id)
if account:
qry = qry.filter(Resource.account_id == account.account_id) # depends on [control=['if'], data=[]]
if not include_disabled:
qry = qry.join(Account, Resource.account_id == Account.account_id).filter(Account.enabled == 1) # depends on [control=['if'], data=[]]
if location:
qry = qry.filter(Resource.location == location) # depends on [control=['if'], data=[]]
return {res.resource_id: cls(res) for res in qry.all()} |
def _match(self, doc, where):
    """Return True if 'doc' matches the 'where' condition.

    A document matches when every key of *where* is present in *doc*
    with an equal value; an empty *where* matches any document.

    :param doc: document to test (dict)
    :param where: required key/value pairs (dict)
    :return: True on match, False otherwise
    """
    assert isinstance(where, dict), "where is not a dictionary"
    assert isinstance(doc, dict), "doc is not a dictionary"
    try:
        # Generator (not a materialized list) so the scan short-circuits
        # at the first mismatching pair.
        return all(doc[k] == v for k, v in where.items())
    except KeyError:
        # A key required by 'where' is missing from 'doc'.
        return False
constant[Return True if 'doc' matches the 'where' condition.]
assert[call[name[isinstance], parameter[name[where], name[dict]]]]
assert[call[name[isinstance], parameter[name[doc], name[dict]]]]
<ast.Try object at 0x7da18c4cfc70> | keyword[def] identifier[_match] ( identifier[self] , identifier[doc] , identifier[where] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[where] , identifier[dict] ), literal[string]
keyword[assert] identifier[isinstance] ( identifier[doc] , identifier[dict] ), literal[string]
keyword[try] :
keyword[return] identifier[all] ([ identifier[doc] [ identifier[k] ]== identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[where] . identifier[items] ()])
keyword[except] identifier[KeyError] :
keyword[return] keyword[False] | def _match(self, doc, where):
"""Return True if 'doc' matches the 'where' condition."""
assert isinstance(where, dict), 'where is not a dictionary'
assert isinstance(doc, dict), 'doc is not a dictionary'
try:
return all([doc[k] == v for (k, v) in where.items()]) # depends on [control=['try'], data=[]]
except KeyError:
return False # depends on [control=['except'], data=[]] |
def enqueue_message(self, message, timeout):
    """Add the given message to this transport's queue.

    WRTE messages are acknowledged with an OKAY before being queued;
    an incoming OKAY records (or validates) the remote id carried in
    its arg0.

    Args:
        message: The AdbMessage to enqueue.
        timeout: The timeout to use for the operation. Specifically, WRTE
            messages cause an OKAY to be sent; timeout is used for that send.
    """
    command = message.command
    if command == 'WRTE':
        # Ack WRTE messages immediately.
        self._send_command('OKAY', timeout=timeout)
    elif command == 'OKAY':
        # Handle our OPEN ack if it gets enqueued.
        self._set_or_check_remote_id(message.arg0)
    self.message_queue.put(message)
constant[Add the given message to this transport's queue.
This method also handles ACKing any WRTE messages.
Args:
message: The AdbMessage to enqueue.
timeout: The timeout to use for the operation. Specifically, WRTE
messages cause an OKAY to be sent; timeout is used for that send.
]
if compare[name[message].command equal[==] constant[WRTE]] begin[:]
call[name[self]._send_command, parameter[constant[OKAY]]]
call[name[self].message_queue.put, parameter[name[message]]] | keyword[def] identifier[enqueue_message] ( identifier[self] , identifier[message] , identifier[timeout] ):
literal[string]
keyword[if] identifier[message] . identifier[command] == literal[string] :
identifier[self] . identifier[_send_command] ( literal[string] , identifier[timeout] = identifier[timeout] )
keyword[elif] identifier[message] . identifier[command] == literal[string] :
identifier[self] . identifier[_set_or_check_remote_id] ( identifier[message] . identifier[arg0] )
identifier[self] . identifier[message_queue] . identifier[put] ( identifier[message] ) | def enqueue_message(self, message, timeout):
"""Add the given message to this transport's queue.
This method also handles ACKing any WRTE messages.
Args:
message: The AdbMessage to enqueue.
timeout: The timeout to use for the operation. Specifically, WRTE
messages cause an OKAY to be sent; timeout is used for that send.
"""
# Ack WRTE messages immediately, handle our OPEN ack if it gets enqueued.
if message.command == 'WRTE':
self._send_command('OKAY', timeout=timeout) # depends on [control=['if'], data=[]]
elif message.command == 'OKAY':
self._set_or_check_remote_id(message.arg0) # depends on [control=['if'], data=[]]
self.message_queue.put(message) |
def run_server(port=5010, debug=False, **kwargs):
    """
    Run the cauldron http server used to interact with cauldron from a remote
    host.

    :param port:
        The port on which to bind the cauldron server.
    :param debug:
        Whether or not the server should be run in debug mode. If true, the
        server will echo debugging information during operation.
    :param kwargs:
        Custom properties to alter the way the server runs.
    """
    # Local import: loading the server machinery is deferred until the
    # server is actually started.
    from cauldron.cli.server import run

    server_options = dict(kwargs)
    run.execute(port=port, debug=debug, **server_options)
constant[
Run the cauldron http server used to interact with cauldron from a remote
host.
:param port:
The port on which to bind the cauldron server.
:param debug:
Whether or not the server should be run in debug mode. If true, the
server will echo debugging information during operation.
:param kwargs:
Custom properties to alter the way the server runs.
]
from relative_module[cauldron.cli.server] import module[run]
call[name[run].execute, parameter[]] | keyword[def] identifier[run_server] ( identifier[port] = literal[int] , identifier[debug] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[from] identifier[cauldron] . identifier[cli] . identifier[server] keyword[import] identifier[run]
identifier[run] . identifier[execute] ( identifier[port] = identifier[port] , identifier[debug] = identifier[debug] ,** identifier[kwargs] ) | def run_server(port=5010, debug=False, **kwargs):
"""
Run the cauldron http server used to interact with cauldron from a remote
host.
:param port:
The port on which to bind the cauldron server.
:param debug:
Whether or not the server should be run in debug mode. If true, the
server will echo debugging information during operation.
:param kwargs:
Custom properties to alter the way the server runs.
"""
from cauldron.cli.server import run
run.execute(port=port, debug=debug, **kwargs) |
def eval_gpr(expr, knockouts):
"""evaluate compiled ast of gene_reaction_rule with knockouts
Parameters
----------
expr : Expression
The ast of the gene reaction rule
knockouts : DictList, set
Set of genes that are knocked out
Returns
-------
bool
True if the gene reaction rule is true with the given knockouts
otherwise false
"""
if isinstance(expr, Expression):
return eval_gpr(expr.body, knockouts)
elif isinstance(expr, Name):
return expr.id not in knockouts
elif isinstance(expr, BoolOp):
op = expr.op
if isinstance(op, Or):
return any(eval_gpr(i, knockouts) for i in expr.values)
elif isinstance(op, And):
return all(eval_gpr(i, knockouts) for i in expr.values)
else:
raise TypeError("unsupported operation " + op.__class__.__name__)
elif expr is None:
return True
else:
raise TypeError("unsupported operation " + repr(expr)) | def function[eval_gpr, parameter[expr, knockouts]]:
constant[evaluate compiled ast of gene_reaction_rule with knockouts
Parameters
----------
expr : Expression
The ast of the gene reaction rule
knockouts : DictList, set
Set of genes that are knocked out
Returns
-------
bool
True if the gene reaction rule is true with the given knockouts
otherwise false
]
if call[name[isinstance], parameter[name[expr], name[Expression]]] begin[:]
return[call[name[eval_gpr], parameter[name[expr].body, name[knockouts]]]] | keyword[def] identifier[eval_gpr] ( identifier[expr] , identifier[knockouts] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[expr] , identifier[Expression] ):
keyword[return] identifier[eval_gpr] ( identifier[expr] . identifier[body] , identifier[knockouts] )
keyword[elif] identifier[isinstance] ( identifier[expr] , identifier[Name] ):
keyword[return] identifier[expr] . identifier[id] keyword[not] keyword[in] identifier[knockouts]
keyword[elif] identifier[isinstance] ( identifier[expr] , identifier[BoolOp] ):
identifier[op] = identifier[expr] . identifier[op]
keyword[if] identifier[isinstance] ( identifier[op] , identifier[Or] ):
keyword[return] identifier[any] ( identifier[eval_gpr] ( identifier[i] , identifier[knockouts] ) keyword[for] identifier[i] keyword[in] identifier[expr] . identifier[values] )
keyword[elif] identifier[isinstance] ( identifier[op] , identifier[And] ):
keyword[return] identifier[all] ( identifier[eval_gpr] ( identifier[i] , identifier[knockouts] ) keyword[for] identifier[i] keyword[in] identifier[expr] . identifier[values] )
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] + identifier[op] . identifier[__class__] . identifier[__name__] )
keyword[elif] identifier[expr] keyword[is] keyword[None] :
keyword[return] keyword[True]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] + identifier[repr] ( identifier[expr] )) | def eval_gpr(expr, knockouts):
"""evaluate compiled ast of gene_reaction_rule with knockouts
Parameters
----------
expr : Expression
The ast of the gene reaction rule
knockouts : DictList, set
Set of genes that are knocked out
Returns
-------
bool
True if the gene reaction rule is true with the given knockouts
otherwise false
"""
if isinstance(expr, Expression):
return eval_gpr(expr.body, knockouts) # depends on [control=['if'], data=[]]
elif isinstance(expr, Name):
return expr.id not in knockouts # depends on [control=['if'], data=[]]
elif isinstance(expr, BoolOp):
op = expr.op
if isinstance(op, Or):
return any((eval_gpr(i, knockouts) for i in expr.values)) # depends on [control=['if'], data=[]]
elif isinstance(op, And):
return all((eval_gpr(i, knockouts) for i in expr.values)) # depends on [control=['if'], data=[]]
else:
raise TypeError('unsupported operation ' + op.__class__.__name__) # depends on [control=['if'], data=[]]
elif expr is None:
return True # depends on [control=['if'], data=[]]
else:
raise TypeError('unsupported operation ' + repr(expr)) |
def parse_yes_no_str(bool_str):
"""Parse a string serialization of boolean data as yes (Y) or no (N).
Prase a string serialization of boolean data where True is "Y" and False is
"N" case-insensitive.
@param bool_str: The string to parse.
@type bool_str: str
@return: The interpreted string.
@rtype: bool
@raise ValueError: Raised if the passed string is not equal to 'N' or 'Y'
case insensitive.
"""
lower_bool_str = bool_str.lower()
if lower_bool_str == 'n':
return False
elif lower_bool_str == 'y':
return True
else:
raise ValueError('%s not a valid boolean string.' % bool_str) | def function[parse_yes_no_str, parameter[bool_str]]:
constant[Parse a string serialization of boolean data as yes (Y) or no (N).
Prase a string serialization of boolean data where True is "Y" and False is
"N" case-insensitive.
@param bool_str: The string to parse.
@type bool_str: str
@return: The interpreted string.
@rtype: bool
@raise ValueError: Raised if the passed string is not equal to 'N' or 'Y'
case insensitive.
]
variable[lower_bool_str] assign[=] call[name[bool_str].lower, parameter[]]
if compare[name[lower_bool_str] equal[==] constant[n]] begin[:]
return[constant[False]] | keyword[def] identifier[parse_yes_no_str] ( identifier[bool_str] ):
literal[string]
identifier[lower_bool_str] = identifier[bool_str] . identifier[lower] ()
keyword[if] identifier[lower_bool_str] == literal[string] :
keyword[return] keyword[False]
keyword[elif] identifier[lower_bool_str] == literal[string] :
keyword[return] keyword[True]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[bool_str] ) | def parse_yes_no_str(bool_str):
"""Parse a string serialization of boolean data as yes (Y) or no (N).
Prase a string serialization of boolean data where True is "Y" and False is
"N" case-insensitive.
@param bool_str: The string to parse.
@type bool_str: str
@return: The interpreted string.
@rtype: bool
@raise ValueError: Raised if the passed string is not equal to 'N' or 'Y'
case insensitive.
"""
lower_bool_str = bool_str.lower()
if lower_bool_str == 'n':
return False # depends on [control=['if'], data=[]]
elif lower_bool_str == 'y':
return True # depends on [control=['if'], data=[]]
else:
raise ValueError('%s not a valid boolean string.' % bool_str) |
def find_stack_elements(self, module, module_name="", _visited_modules=None):
"""
This function goes through the given container and returns the stack elements. Each stack
element is represented by a tuple:
( container_name, element_name, stack_element)
The tuples are returned in an array
"""
from types import ModuleType
if _visited_modules is None: _visited_modules = []
_visited_modules.append(module)
#
elements = []
for el_name in dir(module):
the_el = module.__getattribute__(el_name)
if isinstance(the_el, ModuleType):
# Recursively go into the module
if the_el in _visited_modules:
continue
elements = elements + self.find_stack_elements(the_el, module_name + el_name + ".", _visited_modules)
elif isinstance(the_el, StackElement):
# Add to list
elements.append((module_name, el_name, the_el))
return elements | def function[find_stack_elements, parameter[self, module, module_name, _visited_modules]]:
constant[
This function goes through the given container and returns the stack elements. Each stack
element is represented by a tuple:
( container_name, element_name, stack_element)
The tuples are returned in an array
]
from relative_module[types] import module[ModuleType]
if compare[name[_visited_modules] is constant[None]] begin[:]
variable[_visited_modules] assign[=] list[[]]
call[name[_visited_modules].append, parameter[name[module]]]
variable[elements] assign[=] list[[]]
for taget[name[el_name]] in starred[call[name[dir], parameter[name[module]]]] begin[:]
variable[the_el] assign[=] call[name[module].__getattribute__, parameter[name[el_name]]]
if call[name[isinstance], parameter[name[the_el], name[ModuleType]]] begin[:]
if compare[name[the_el] in name[_visited_modules]] begin[:]
continue
variable[elements] assign[=] binary_operation[name[elements] + call[name[self].find_stack_elements, parameter[name[the_el], binary_operation[binary_operation[name[module_name] + name[el_name]] + constant[.]], name[_visited_modules]]]]
return[name[elements]] | keyword[def] identifier[find_stack_elements] ( identifier[self] , identifier[module] , identifier[module_name] = literal[string] , identifier[_visited_modules] = keyword[None] ):
literal[string]
keyword[from] identifier[types] keyword[import] identifier[ModuleType]
keyword[if] identifier[_visited_modules] keyword[is] keyword[None] : identifier[_visited_modules] =[]
identifier[_visited_modules] . identifier[append] ( identifier[module] )
identifier[elements] =[]
keyword[for] identifier[el_name] keyword[in] identifier[dir] ( identifier[module] ):
identifier[the_el] = identifier[module] . identifier[__getattribute__] ( identifier[el_name] )
keyword[if] identifier[isinstance] ( identifier[the_el] , identifier[ModuleType] ):
keyword[if] identifier[the_el] keyword[in] identifier[_visited_modules] :
keyword[continue]
identifier[elements] = identifier[elements] + identifier[self] . identifier[find_stack_elements] ( identifier[the_el] , identifier[module_name] + identifier[el_name] + literal[string] , identifier[_visited_modules] )
keyword[elif] identifier[isinstance] ( identifier[the_el] , identifier[StackElement] ):
identifier[elements] . identifier[append] (( identifier[module_name] , identifier[el_name] , identifier[the_el] ))
keyword[return] identifier[elements] | def find_stack_elements(self, module, module_name='', _visited_modules=None):
"""
This function goes through the given container and returns the stack elements. Each stack
element is represented by a tuple:
( container_name, element_name, stack_element)
The tuples are returned in an array
"""
from types import ModuleType
if _visited_modules is None:
_visited_modules = [] # depends on [control=['if'], data=['_visited_modules']]
_visited_modules.append(module)
#
elements = []
for el_name in dir(module):
the_el = module.__getattribute__(el_name)
if isinstance(the_el, ModuleType):
# Recursively go into the module
if the_el in _visited_modules:
continue # depends on [control=['if'], data=[]]
elements = elements + self.find_stack_elements(the_el, module_name + el_name + '.', _visited_modules) # depends on [control=['if'], data=[]]
elif isinstance(the_el, StackElement):
# Add to list
elements.append((module_name, el_name, the_el)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['el_name']]
return elements |
def nsorted(
to_sort: Iterable[str],
key: Optional[Callable[[str], Any]] = None
) -> List[str]:
"""Returns a naturally sorted list"""
if key is None:
key_callback = _natural_keys
else:
def key_callback(text: str) -> List[Any]:
return _natural_keys(key(text)) # type: ignore
return sorted(to_sort, key=key_callback) | def function[nsorted, parameter[to_sort, key]]:
constant[Returns a naturally sorted list]
if compare[name[key] is constant[None]] begin[:]
variable[key_callback] assign[=] name[_natural_keys]
return[call[name[sorted], parameter[name[to_sort]]]] | keyword[def] identifier[nsorted] (
identifier[to_sort] : identifier[Iterable] [ identifier[str] ],
identifier[key] : identifier[Optional] [ identifier[Callable] [[ identifier[str] ], identifier[Any] ]]= keyword[None]
)-> identifier[List] [ identifier[str] ]:
literal[string]
keyword[if] identifier[key] keyword[is] keyword[None] :
identifier[key_callback] = identifier[_natural_keys]
keyword[else] :
keyword[def] identifier[key_callback] ( identifier[text] : identifier[str] )-> identifier[List] [ identifier[Any] ]:
keyword[return] identifier[_natural_keys] ( identifier[key] ( identifier[text] ))
keyword[return] identifier[sorted] ( identifier[to_sort] , identifier[key] = identifier[key_callback] ) | def nsorted(to_sort: Iterable[str], key: Optional[Callable[[str], Any]]=None) -> List[str]:
"""Returns a naturally sorted list"""
if key is None:
key_callback = _natural_keys # depends on [control=['if'], data=[]]
else:
def key_callback(text: str) -> List[Any]:
return _natural_keys(key(text)) # type: ignore
return sorted(to_sort, key=key_callback) |
def get_gtf():
"""
Download GTF file from Ensembl, only keeping the chr17 entries.
"""
size, md5, url = GTF
full_gtf = os.path.join(args.data_dir, os.path.basename(url))
subset_gtf = os.path.join(
args.data_dir,
os.path.basename(url).replace('.gtf.gz', '_%s.gtf' % CHROM))
if not _up_to_date(md5, subset_gtf):
download(url, full_gtf)
cmds = [
'zcat', '<',
full_gtf,
'|', 'awk -F "\\t" \'{if ($1 == "%s") print $0}\''
% CHROM.replace('chr', ''),
'|', 'awk \'{print "chr"$0}\'', '>', subset_gtf]
logged_command(cmds) | def function[get_gtf, parameter[]]:
constant[
Download GTF file from Ensembl, only keeping the chr17 entries.
]
<ast.Tuple object at 0x7da1b27ba470> assign[=] name[GTF]
variable[full_gtf] assign[=] call[name[os].path.join, parameter[name[args].data_dir, call[name[os].path.basename, parameter[name[url]]]]]
variable[subset_gtf] assign[=] call[name[os].path.join, parameter[name[args].data_dir, call[call[name[os].path.basename, parameter[name[url]]].replace, parameter[constant[.gtf.gz], binary_operation[constant[_%s.gtf] <ast.Mod object at 0x7da2590d6920> name[CHROM]]]]]]
if <ast.UnaryOp object at 0x7da20c6ab190> begin[:]
call[name[download], parameter[name[url], name[full_gtf]]]
variable[cmds] assign[=] list[[<ast.Constant object at 0x7da20c6a8fd0>, <ast.Constant object at 0x7da20c6aaad0>, <ast.Name object at 0x7da20c6ab5b0>, <ast.Constant object at 0x7da20c6aab90>, <ast.BinOp object at 0x7da20c6aa8f0>, <ast.Constant object at 0x7da1b27bba60>, <ast.Constant object at 0x7da1b27bae60>, <ast.Constant object at 0x7da1b27bb550>, <ast.Name object at 0x7da1b27ba200>]]
call[name[logged_command], parameter[name[cmds]]] | keyword[def] identifier[get_gtf] ():
literal[string]
identifier[size] , identifier[md5] , identifier[url] = identifier[GTF]
identifier[full_gtf] = identifier[os] . identifier[path] . identifier[join] ( identifier[args] . identifier[data_dir] , identifier[os] . identifier[path] . identifier[basename] ( identifier[url] ))
identifier[subset_gtf] = identifier[os] . identifier[path] . identifier[join] (
identifier[args] . identifier[data_dir] ,
identifier[os] . identifier[path] . identifier[basename] ( identifier[url] ). identifier[replace] ( literal[string] , literal[string] % identifier[CHROM] ))
keyword[if] keyword[not] identifier[_up_to_date] ( identifier[md5] , identifier[subset_gtf] ):
identifier[download] ( identifier[url] , identifier[full_gtf] )
identifier[cmds] =[
literal[string] , literal[string] ,
identifier[full_gtf] ,
literal[string] , literal[string]
% identifier[CHROM] . identifier[replace] ( literal[string] , literal[string] ),
literal[string] , literal[string] , literal[string] , identifier[subset_gtf] ]
identifier[logged_command] ( identifier[cmds] ) | def get_gtf():
"""
Download GTF file from Ensembl, only keeping the chr17 entries.
"""
(size, md5, url) = GTF
full_gtf = os.path.join(args.data_dir, os.path.basename(url))
subset_gtf = os.path.join(args.data_dir, os.path.basename(url).replace('.gtf.gz', '_%s.gtf' % CHROM))
if not _up_to_date(md5, subset_gtf):
download(url, full_gtf)
cmds = ['zcat', '<', full_gtf, '|', 'awk -F "\\t" \'{if ($1 == "%s") print $0}\'' % CHROM.replace('chr', ''), '|', 'awk \'{print "chr"$0}\'', '>', subset_gtf]
logged_command(cmds) # depends on [control=['if'], data=[]] |
def create(self, pools):
"""
Method to create pool's
:param pools: List containing pool's desired to be created on database
:return: None
"""
data = {'server_pools': pools}
return super(ApiPool, self).post('api/v3/pool/', data) | def function[create, parameter[self, pools]]:
constant[
Method to create pool's
:param pools: List containing pool's desired to be created on database
:return: None
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da2047eb460>], [<ast.Name object at 0x7da2047e9a20>]]
return[call[call[name[super], parameter[name[ApiPool], name[self]]].post, parameter[constant[api/v3/pool/], name[data]]]] | keyword[def] identifier[create] ( identifier[self] , identifier[pools] ):
literal[string]
identifier[data] ={ literal[string] : identifier[pools] }
keyword[return] identifier[super] ( identifier[ApiPool] , identifier[self] ). identifier[post] ( literal[string] , identifier[data] ) | def create(self, pools):
"""
Method to create pool's
:param pools: List containing pool's desired to be created on database
:return: None
"""
data = {'server_pools': pools}
return super(ApiPool, self).post('api/v3/pool/', data) |
def get_history_item(self, index):
u'''Return the current contents of history item at index (starts with index 1).'''
item = self.history[index - 1]
log(u"get_history_item: index:%d item:%r"%(index, item))
return item.get_line_text() | def function[get_history_item, parameter[self, index]]:
constant[Return the current contents of history item at index (starts with index 1).]
variable[item] assign[=] call[name[self].history][binary_operation[name[index] - constant[1]]]
call[name[log], parameter[binary_operation[constant[get_history_item: index:%d item:%r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b28ac400>, <ast.Name object at 0x7da1b28ac6a0>]]]]]
return[call[name[item].get_line_text, parameter[]]] | keyword[def] identifier[get_history_item] ( identifier[self] , identifier[index] ):
literal[string]
identifier[item] = identifier[self] . identifier[history] [ identifier[index] - literal[int] ]
identifier[log] ( literal[string] %( identifier[index] , identifier[item] ))
keyword[return] identifier[item] . identifier[get_line_text] () | def get_history_item(self, index):
u"""Return the current contents of history item at index (starts with index 1)."""
item = self.history[index - 1]
log(u'get_history_item: index:%d item:%r' % (index, item))
return item.get_line_text() |
def copytree(src, dst):
"""Recursively copies the source directory to the destination
only if the files are newer or modified by using rsync.
"""
from os import path, waitpid
from subprocess import Popen, PIPE
#Append any trailing / that we need to get rsync to work correctly.
source = path.join(src, "")
desti = path.join(dst, "")
if not path.isdir(desti):
from os import mkdir
mkdir(desti)
prsync = Popen("rsync -t -u -r {} {}".format(source, desti), close_fds=True,
shell=True, executable="/bin/bash", stdout=PIPE, stderr=PIPE)
waitpid(prsync.pid, 0)
#Redirect the output and errors so that we don't pollute stdout.
#output = prsync.stdout.readlines()
error = prsync.stderr.readlines()
prsync.stderr.close()
prsync.stdout.close()
if len(error) > 0:
from fortpy.msg import warn
warn("Error while copying {} using rsync.\n\n{}".format(source, '\n'.join(error))) | def function[copytree, parameter[src, dst]]:
constant[Recursively copies the source directory to the destination
only if the files are newer or modified by using rsync.
]
from relative_module[os] import module[path], module[waitpid]
from relative_module[subprocess] import module[Popen], module[PIPE]
variable[source] assign[=] call[name[path].join, parameter[name[src], constant[]]]
variable[desti] assign[=] call[name[path].join, parameter[name[dst], constant[]]]
if <ast.UnaryOp object at 0x7da1b2545cf0> begin[:]
from relative_module[os] import module[mkdir]
call[name[mkdir], parameter[name[desti]]]
variable[prsync] assign[=] call[name[Popen], parameter[call[constant[rsync -t -u -r {} {}].format, parameter[name[source], name[desti]]]]]
call[name[waitpid], parameter[name[prsync].pid, constant[0]]]
variable[error] assign[=] call[name[prsync].stderr.readlines, parameter[]]
call[name[prsync].stderr.close, parameter[]]
call[name[prsync].stdout.close, parameter[]]
if compare[call[name[len], parameter[name[error]]] greater[>] constant[0]] begin[:]
from relative_module[fortpy.msg] import module[warn]
call[name[warn], parameter[call[constant[Error while copying {} using rsync.
{}].format, parameter[name[source], call[constant[
].join, parameter[name[error]]]]]]] | keyword[def] identifier[copytree] ( identifier[src] , identifier[dst] ):
literal[string]
keyword[from] identifier[os] keyword[import] identifier[path] , identifier[waitpid]
keyword[from] identifier[subprocess] keyword[import] identifier[Popen] , identifier[PIPE]
identifier[source] = identifier[path] . identifier[join] ( identifier[src] , literal[string] )
identifier[desti] = identifier[path] . identifier[join] ( identifier[dst] , literal[string] )
keyword[if] keyword[not] identifier[path] . identifier[isdir] ( identifier[desti] ):
keyword[from] identifier[os] keyword[import] identifier[mkdir]
identifier[mkdir] ( identifier[desti] )
identifier[prsync] = identifier[Popen] ( literal[string] . identifier[format] ( identifier[source] , identifier[desti] ), identifier[close_fds] = keyword[True] ,
identifier[shell] = keyword[True] , identifier[executable] = literal[string] , identifier[stdout] = identifier[PIPE] , identifier[stderr] = identifier[PIPE] )
identifier[waitpid] ( identifier[prsync] . identifier[pid] , literal[int] )
identifier[error] = identifier[prsync] . identifier[stderr] . identifier[readlines] ()
identifier[prsync] . identifier[stderr] . identifier[close] ()
identifier[prsync] . identifier[stdout] . identifier[close] ()
keyword[if] identifier[len] ( identifier[error] )> literal[int] :
keyword[from] identifier[fortpy] . identifier[msg] keyword[import] identifier[warn]
identifier[warn] ( literal[string] . identifier[format] ( identifier[source] , literal[string] . identifier[join] ( identifier[error] ))) | def copytree(src, dst):
"""Recursively copies the source directory to the destination
only if the files are newer or modified by using rsync.
"""
from os import path, waitpid
from subprocess import Popen, PIPE
#Append any trailing / that we need to get rsync to work correctly.
source = path.join(src, '')
desti = path.join(dst, '')
if not path.isdir(desti):
from os import mkdir
mkdir(desti) # depends on [control=['if'], data=[]]
prsync = Popen('rsync -t -u -r {} {}'.format(source, desti), close_fds=True, shell=True, executable='/bin/bash', stdout=PIPE, stderr=PIPE)
waitpid(prsync.pid, 0)
#Redirect the output and errors so that we don't pollute stdout.
#output = prsync.stdout.readlines()
error = prsync.stderr.readlines()
prsync.stderr.close()
prsync.stdout.close()
if len(error) > 0:
from fortpy.msg import warn
warn('Error while copying {} using rsync.\n\n{}'.format(source, '\n'.join(error))) # depends on [control=['if'], data=[]] |
def generate_facecolors(x, y, z, dx, dy, dz, color):
"""Generates shaded facecolors for shaded bars.
This is here to work around a Matplotlib bug
where alpha does not work in Bar3D.
Args:
x (array_like): The x- coordinates of the anchor point of the bars.
y (array_like): The y- coordinates of the anchor point of the bars.
z (array_like): The z- coordinates of the anchor point of the bars.
dx (array_like): Width of bars.
dy (array_like): Depth of bars.
dz (array_like): Height of bars.
color (array_like): sequence of valid color specifications, optional
Returns:
list: Shaded colors for bars.
"""
cuboid = np.array([
# -z
(
(0, 0, 0),
(0, 1, 0),
(1, 1, 0),
(1, 0, 0),
),
# +z
(
(0, 0, 1),
(1, 0, 1),
(1, 1, 1),
(0, 1, 1),
),
# -y
(
(0, 0, 0),
(1, 0, 0),
(1, 0, 1),
(0, 0, 1),
),
# +y
(
(0, 1, 0),
(0, 1, 1),
(1, 1, 1),
(1, 1, 0),
),
# -x
(
(0, 0, 0),
(0, 0, 1),
(0, 1, 1),
(0, 1, 0),
),
# +x
(
(1, 0, 0),
(1, 1, 0),
(1, 1, 1),
(1, 0, 1),
),
])
# indexed by [bar, face, vertex, coord]
polys = np.empty(x.shape + cuboid.shape)
# handle each coordinate separately
for i, p, dp in [(0, x, dx), (1, y, dy), (2, z, dz)]:
p = p[..., np.newaxis, np.newaxis]
dp = dp[..., np.newaxis, np.newaxis]
polys[..., i] = p + dp * cuboid[..., i]
# collapse the first two axes
polys = polys.reshape((-1,) + polys.shape[2:])
facecolors = []
if len(color) == len(x):
# bar colors specified, need to expand to number of faces
for c in color:
facecolors.extend([c] * 6)
else:
# a single color specified, or face colors specified explicitly
facecolors = list(mcolors.to_rgba_array(color))
if len(facecolors) < len(x):
facecolors *= (6 * len(x))
normals = _generate_normals(polys)
return _shade_colors(facecolors, normals) | def function[generate_facecolors, parameter[x, y, z, dx, dy, dz, color]]:
constant[Generates shaded facecolors for shaded bars.
This is here to work around a Matplotlib bug
where alpha does not work in Bar3D.
Args:
x (array_like): The x- coordinates of the anchor point of the bars.
y (array_like): The y- coordinates of the anchor point of the bars.
z (array_like): The z- coordinates of the anchor point of the bars.
dx (array_like): Width of bars.
dy (array_like): Depth of bars.
dz (array_like): Height of bars.
color (array_like): sequence of valid color specifications, optional
Returns:
list: Shaded colors for bars.
]
variable[cuboid] assign[=] call[name[np].array, parameter[list[[<ast.Tuple object at 0x7da20c992bc0>, <ast.Tuple object at 0x7da20c993f10>, <ast.Tuple object at 0x7da20c993c10>, <ast.Tuple object at 0x7da20c992c80>, <ast.Tuple object at 0x7da20c992530>, <ast.Tuple object at 0x7da1b05acac0>]]]]
variable[polys] assign[=] call[name[np].empty, parameter[binary_operation[name[x].shape + name[cuboid].shape]]]
for taget[tuple[[<ast.Name object at 0x7da1b05ad240>, <ast.Name object at 0x7da1b05ac370>, <ast.Name object at 0x7da1b05ae620>]]] in starred[list[[<ast.Tuple object at 0x7da1b05aceb0>, <ast.Tuple object at 0x7da1b05acbb0>, <ast.Tuple object at 0x7da1b05af6a0>]]] begin[:]
variable[p] assign[=] call[name[p]][tuple[[<ast.Constant object at 0x7da1b05ae5f0>, <ast.Attribute object at 0x7da1b05aedd0>, <ast.Attribute object at 0x7da1b05af8e0>]]]
variable[dp] assign[=] call[name[dp]][tuple[[<ast.Constant object at 0x7da1b05acf40>, <ast.Attribute object at 0x7da1b05ae590>, <ast.Attribute object at 0x7da1b05af1c0>]]]
call[name[polys]][tuple[[<ast.Constant object at 0x7da1b05acdf0>, <ast.Name object at 0x7da1b05ad120>]]] assign[=] binary_operation[name[p] + binary_operation[name[dp] * call[name[cuboid]][tuple[[<ast.Constant object at 0x7da1b05aee30>, <ast.Name object at 0x7da1b05af160>]]]]]
variable[polys] assign[=] call[name[polys].reshape, parameter[binary_operation[tuple[[<ast.UnaryOp object at 0x7da1b05ad330>]] + call[name[polys].shape][<ast.Slice object at 0x7da1b05ae050>]]]]
variable[facecolors] assign[=] list[[]]
if compare[call[name[len], parameter[name[color]]] equal[==] call[name[len], parameter[name[x]]]] begin[:]
for taget[name[c]] in starred[name[color]] begin[:]
call[name[facecolors].extend, parameter[binary_operation[list[[<ast.Name object at 0x7da1b05ad7b0>]] * constant[6]]]]
variable[normals] assign[=] call[name[_generate_normals], parameter[name[polys]]]
return[call[name[_shade_colors], parameter[name[facecolors], name[normals]]]] | keyword[def] identifier[generate_facecolors] ( identifier[x] , identifier[y] , identifier[z] , identifier[dx] , identifier[dy] , identifier[dz] , identifier[color] ):
literal[string]
identifier[cuboid] = identifier[np] . identifier[array] ([
(
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
),
(
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
),
(
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
),
(
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
),
(
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
),
(
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] ),
),
])
identifier[polys] = identifier[np] . identifier[empty] ( identifier[x] . identifier[shape] + identifier[cuboid] . identifier[shape] )
keyword[for] identifier[i] , identifier[p] , identifier[dp] keyword[in] [( literal[int] , identifier[x] , identifier[dx] ),( literal[int] , identifier[y] , identifier[dy] ),( literal[int] , identifier[z] , identifier[dz] )]:
identifier[p] = identifier[p] [..., identifier[np] . identifier[newaxis] , identifier[np] . identifier[newaxis] ]
identifier[dp] = identifier[dp] [..., identifier[np] . identifier[newaxis] , identifier[np] . identifier[newaxis] ]
identifier[polys] [..., identifier[i] ]= identifier[p] + identifier[dp] * identifier[cuboid] [..., identifier[i] ]
identifier[polys] = identifier[polys] . identifier[reshape] ((- literal[int] ,)+ identifier[polys] . identifier[shape] [ literal[int] :])
identifier[facecolors] =[]
keyword[if] identifier[len] ( identifier[color] )== identifier[len] ( identifier[x] ):
keyword[for] identifier[c] keyword[in] identifier[color] :
identifier[facecolors] . identifier[extend] ([ identifier[c] ]* literal[int] )
keyword[else] :
identifier[facecolors] = identifier[list] ( identifier[mcolors] . identifier[to_rgba_array] ( identifier[color] ))
keyword[if] identifier[len] ( identifier[facecolors] )< identifier[len] ( identifier[x] ):
identifier[facecolors] *=( literal[int] * identifier[len] ( identifier[x] ))
identifier[normals] = identifier[_generate_normals] ( identifier[polys] )
keyword[return] identifier[_shade_colors] ( identifier[facecolors] , identifier[normals] ) | def generate_facecolors(x, y, z, dx, dy, dz, color):
"""Generates shaded facecolors for shaded bars.
This is here to work around a Matplotlib bug
where alpha does not work in Bar3D.
Args:
x (array_like): The x- coordinates of the anchor point of the bars.
y (array_like): The y- coordinates of the anchor point of the bars.
z (array_like): The z- coordinates of the anchor point of the bars.
dx (array_like): Width of bars.
dy (array_like): Depth of bars.
dz (array_like): Height of bars.
color (array_like): sequence of valid color specifications, optional
Returns:
list: Shaded colors for bars.
"""
# -z
# +z
# -y
# +y
# -x
# +x
cuboid = np.array([((0, 0, 0), (0, 1, 0), (1, 1, 0), (1, 0, 0)), ((0, 0, 1), (1, 0, 1), (1, 1, 1), (0, 1, 1)), ((0, 0, 0), (1, 0, 0), (1, 0, 1), (0, 0, 1)), ((0, 1, 0), (0, 1, 1), (1, 1, 1), (1, 1, 0)), ((0, 0, 0), (0, 0, 1), (0, 1, 1), (0, 1, 0)), ((1, 0, 0), (1, 1, 0), (1, 1, 1), (1, 0, 1))])
# indexed by [bar, face, vertex, coord]
polys = np.empty(x.shape + cuboid.shape)
# handle each coordinate separately
for (i, p, dp) in [(0, x, dx), (1, y, dy), (2, z, dz)]:
p = p[..., np.newaxis, np.newaxis]
dp = dp[..., np.newaxis, np.newaxis]
polys[..., i] = p + dp * cuboid[..., i] # depends on [control=['for'], data=[]]
# collapse the first two axes
polys = polys.reshape((-1,) + polys.shape[2:])
facecolors = []
if len(color) == len(x):
# bar colors specified, need to expand to number of faces
for c in color:
facecolors.extend([c] * 6) # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]]
else:
# a single color specified, or face colors specified explicitly
facecolors = list(mcolors.to_rgba_array(color))
if len(facecolors) < len(x):
facecolors *= 6 * len(x) # depends on [control=['if'], data=[]]
normals = _generate_normals(polys)
return _shade_colors(facecolors, normals) |
def string(
        element_name,  # type: Text
        attribute=None,  # type: Optional[Text]
        required=True,  # type: bool
        alias=None,  # type: Optional[Text]
        default='',  # type: Optional[Text]
        omit_empty=False,  # type: bool
        strip_whitespace=True,  # type: bool
        hooks=None  # type: Optional[Hooks]
):
    # type: (...) -> Processor
    """
    Create a processor for string values.

    :param strip_whitespace: Indicates whether leading and trailing whitespace should be stripped
        from parsed string values.

    See also :func:`declxml.boolean`
    """
    # Delegate to the generic primitive-value processor, supplying a string
    # parser configured for the requested whitespace handling.
    return _PrimitiveValue(
        element_name,
        _string_parser(strip_whitespace),
        attribute,
        required,
        alias,
        default,
        omit_empty,
        hooks
    )
constant[
Create a processor for string values.
:param strip_whitespace: Indicates whether leading and trailing whitespace should be stripped
from parsed string values.
See also :func:`declxml.boolean`
]
variable[value_parser] assign[=] call[name[_string_parser], parameter[name[strip_whitespace]]]
return[call[name[_PrimitiveValue], parameter[name[element_name], name[value_parser], name[attribute], name[required], name[alias], name[default], name[omit_empty], name[hooks]]]] | keyword[def] identifier[string] (
identifier[element_name] ,
identifier[attribute] = keyword[None] ,
identifier[required] = keyword[True] ,
identifier[alias] = keyword[None] ,
identifier[default] = literal[string] ,
identifier[omit_empty] = keyword[False] ,
identifier[strip_whitespace] = keyword[True] ,
identifier[hooks] = keyword[None]
):
literal[string]
identifier[value_parser] = identifier[_string_parser] ( identifier[strip_whitespace] )
keyword[return] identifier[_PrimitiveValue] (
identifier[element_name] ,
identifier[value_parser] ,
identifier[attribute] ,
identifier[required] ,
identifier[alias] ,
identifier[default] ,
identifier[omit_empty] ,
identifier[hooks]
) | def string(element_name, attribute=None, required=True, alias=None, default='', omit_empty=False, strip_whitespace=True, hooks=None): # type: Text
# type: Optional[Text]
# type: bool
# type: Optional[Text]
# type: Optional[Text]
# type: bool
# type: bool
# type: Optional[Hooks]
# type: (...) -> Processor
'\n Create a processor for string values.\n\n :param strip_whitespace: Indicates whether leading and trailing whitespace should be stripped\n from parsed string values.\n\n See also :func:`declxml.boolean`\n '
value_parser = _string_parser(strip_whitespace)
return _PrimitiveValue(element_name, value_parser, attribute, required, alias, default, omit_empty, hooks) |
def p_expr_new(p):
    'expr : NEW class_name_reference ctor_arguments'
    # PLY grammar action. The docstring above is the production rule itself
    # and is read by yacc, so its text must not be reworded.
    # Build an ast.New node from the class reference (p[2]) and the
    # constructor arguments (p[3]), tagged with the line of the NEW token.
    p[0] = ast.New(p[2], p[3], lineno=p.lineno(1))
constant[expr : NEW class_name_reference ctor_arguments]
call[name[p]][constant[0]] assign[=] call[name[ast].New, parameter[call[name[p]][constant[2]], call[name[p]][constant[3]]]] | keyword[def] identifier[p_expr_new] ( identifier[p] ):
literal[string]
identifier[p] [ literal[int] ]= identifier[ast] . identifier[New] ( identifier[p] [ literal[int] ], identifier[p] [ literal[int] ], identifier[lineno] = identifier[p] . identifier[lineno] ( literal[int] )) | def p_expr_new(p):
"""expr : NEW class_name_reference ctor_arguments"""
p[0] = ast.New(p[2], p[3], lineno=p.lineno(1)) |
def configure_relation(graph, ns, mappings):
    """
    Register relation endpoint(s) between two resources.

    Builds a ``RelationConvention`` from the graph and hands it the
    namespace and mappings to wire up.
    """
    RelationConvention(graph).configure(ns, mappings)
constant[
Register relation endpoint(s) between two resources.
]
variable[convention] assign[=] call[name[RelationConvention], parameter[name[graph]]]
call[name[convention].configure, parameter[name[ns], name[mappings]]] | keyword[def] identifier[configure_relation] ( identifier[graph] , identifier[ns] , identifier[mappings] ):
literal[string]
identifier[convention] = identifier[RelationConvention] ( identifier[graph] )
identifier[convention] . identifier[configure] ( identifier[ns] , identifier[mappings] ) | def configure_relation(graph, ns, mappings):
"""
Register relation endpoint(s) between two resources.
"""
convention = RelationConvention(graph)
convention.configure(ns, mappings) |
def get_activity_by_name(self, activity, category_id=None, resurrect=True):
    """Return the activity dict matching ``activity``, optionally filtered by category.

    A falsy ``category_id`` (``None`` or ``0``) means "no category filter".
    If the activity is found but marked deleted, the backend resurrects it
    unless ``resurrect`` says otherwise.
    """
    # A missing category collapses to the 0 sentinel expected by the backend.
    return self.conn.GetActivityByName(activity, category_id or 0, resurrect)
constant[returns activity dict by name and optionally filtering by category.
if activity is found but is marked as deleted, it will be resurrected
unless told otherwise in the resurrect param
]
variable[category_id] assign[=] <ast.BoolOp object at 0x7da1b16ded10>
return[call[name[self].conn.GetActivityByName, parameter[name[activity], name[category_id], name[resurrect]]]] | keyword[def] identifier[get_activity_by_name] ( identifier[self] , identifier[activity] , identifier[category_id] = keyword[None] , identifier[resurrect] = keyword[True] ):
literal[string]
identifier[category_id] = identifier[category_id] keyword[or] literal[int]
keyword[return] identifier[self] . identifier[conn] . identifier[GetActivityByName] ( identifier[activity] , identifier[category_id] , identifier[resurrect] ) | def get_activity_by_name(self, activity, category_id=None, resurrect=True):
"""returns activity dict by name and optionally filtering by category.
if activity is found but is marked as deleted, it will be resurrected
unless told otherwise in the resurrect param
"""
category_id = category_id or 0
return self.conn.GetActivityByName(activity, category_id, resurrect) |
def get_index_tags(self, index, results, index_tags, column_tags):
    """Build the tag list for one table row (``index``).

    ``index_tags`` entries pair a tag_group name with a 1-based position in
    the ``index`` tuple whose value becomes the tag value
    (cf. 1 for ipVersion in the IP-MIB::ipSystemStatsTable).

    ``column_tags`` entries pair a tag_group name with a column of
    ``results`` whose value for this row becomes the tag value
    (cf. ifDescr in the IF-MIB::ifTable).

    Entries that cannot be resolved are logged and skipped rather than
    aborting the whole row.
    """
    tags = []

    # Tags taken from positions of the index tuple itself.
    for spec in index_tags:
        group = spec[0]
        try:
            value = index[spec[1] - 1].prettyPrint()
        except IndexError:
            self.log.warning("Not enough indexes, skipping this tag")
            continue
        tags.append("{}:{}".format(group, value))

    # Tags taken from other columns of the same table row.
    for spec in column_tags:
        group = spec[0]
        column = spec[1]
        try:
            raw = results[column][index]
        except KeyError:
            self.log.warning("Column %s not present in the table, skipping this tag", column)
            continue
        if reply_invalid(raw):
            self.log.warning("Can't deduct tag from column for tag %s", group)
            continue
        tags.append("{}:{}".format(group, raw.prettyPrint()))
    return tags
constant[
Gather the tags for this row of the table (index) based on the
results (all the results from the query).
index_tags and column_tags are the tags to gather.
- Those specified in index_tags contain the tag_group name and the
index of the value we want to extract from the index tuple.
cf. 1 for ipVersion in the IP-MIB::ipSystemStatsTable for example
- Those specified in column_tags contain the name of a column, which
could be a potential result, to use as a tage
cf. ifDescr in the IF-MIB::ifTable for example
]
variable[tags] assign[=] list[[]]
for taget[name[idx_tag]] in starred[name[index_tags]] begin[:]
variable[tag_group] assign[=] call[name[idx_tag]][constant[0]]
<ast.Try object at 0x7da20c6a98a0>
call[name[tags].append, parameter[call[constant[{}:{}].format, parameter[name[tag_group], name[tag_value]]]]]
for taget[name[col_tag]] in starred[name[column_tags]] begin[:]
variable[tag_group] assign[=] call[name[col_tag]][constant[0]]
<ast.Try object at 0x7da20c6ab670>
if call[name[reply_invalid], parameter[name[tag_value]]] begin[:]
call[name[self].log.warning, parameter[constant[Can't deduct tag from column for tag %s], name[tag_group]]]
continue
variable[tag_value] assign[=] call[name[tag_value].prettyPrint, parameter[]]
call[name[tags].append, parameter[call[constant[{}:{}].format, parameter[name[tag_group], name[tag_value]]]]]
return[name[tags]] | keyword[def] identifier[get_index_tags] ( identifier[self] , identifier[index] , identifier[results] , identifier[index_tags] , identifier[column_tags] ):
literal[string]
identifier[tags] =[]
keyword[for] identifier[idx_tag] keyword[in] identifier[index_tags] :
identifier[tag_group] = identifier[idx_tag] [ literal[int] ]
keyword[try] :
identifier[tag_value] = identifier[index] [ identifier[idx_tag] [ literal[int] ]- literal[int] ]. identifier[prettyPrint] ()
keyword[except] identifier[IndexError] :
identifier[self] . identifier[log] . identifier[warning] ( literal[string] )
keyword[continue]
identifier[tags] . identifier[append] ( literal[string] . identifier[format] ( identifier[tag_group] , identifier[tag_value] ))
keyword[for] identifier[col_tag] keyword[in] identifier[column_tags] :
identifier[tag_group] = identifier[col_tag] [ literal[int] ]
keyword[try] :
identifier[tag_value] = identifier[results] [ identifier[col_tag] [ literal[int] ]][ identifier[index] ]
keyword[except] identifier[KeyError] :
identifier[self] . identifier[log] . identifier[warning] ( literal[string] , identifier[col_tag] [ literal[int] ])
keyword[continue]
keyword[if] identifier[reply_invalid] ( identifier[tag_value] ):
identifier[self] . identifier[log] . identifier[warning] ( literal[string] , identifier[tag_group] )
keyword[continue]
identifier[tag_value] = identifier[tag_value] . identifier[prettyPrint] ()
identifier[tags] . identifier[append] ( literal[string] . identifier[format] ( identifier[tag_group] , identifier[tag_value] ))
keyword[return] identifier[tags] | def get_index_tags(self, index, results, index_tags, column_tags):
"""
Gather the tags for this row of the table (index) based on the
results (all the results from the query).
index_tags and column_tags are the tags to gather.
- Those specified in index_tags contain the tag_group name and the
index of the value we want to extract from the index tuple.
cf. 1 for ipVersion in the IP-MIB::ipSystemStatsTable for example
- Those specified in column_tags contain the name of a column, which
could be a potential result, to use as a tage
cf. ifDescr in the IF-MIB::ifTable for example
"""
tags = []
for idx_tag in index_tags:
tag_group = idx_tag[0]
try:
tag_value = index[idx_tag[1] - 1].prettyPrint() # depends on [control=['try'], data=[]]
except IndexError:
self.log.warning('Not enough indexes, skipping this tag')
continue # depends on [control=['except'], data=[]]
tags.append('{}:{}'.format(tag_group, tag_value)) # depends on [control=['for'], data=['idx_tag']]
for col_tag in column_tags:
tag_group = col_tag[0]
try:
tag_value = results[col_tag[1]][index] # depends on [control=['try'], data=[]]
except KeyError:
self.log.warning('Column %s not present in the table, skipping this tag', col_tag[1])
continue # depends on [control=['except'], data=[]]
if reply_invalid(tag_value):
self.log.warning("Can't deduct tag from column for tag %s", tag_group)
continue # depends on [control=['if'], data=[]]
tag_value = tag_value.prettyPrint()
tags.append('{}:{}'.format(tag_group, tag_value)) # depends on [control=['for'], data=['col_tag']]
return tags |
def soup_maker(fh):
    """Turn an open file handle into a parsed soup object.

    Prefers bs4 (lxml backend, with every tag name normalised to lower
    case); when bs4 cannot be imported, falls back to the legacy
    BeautifulStoneSoup parser.
    """
    try:
        from bs4 import BeautifulSoup
        parsed = BeautifulSoup(fh, "lxml")
        # bs4 preserves tag case; normalise so callers can match lower-case.
        for element in parsed.find_all():
            element.name = element.name.lower()
    except ImportError:
        from BeautifulSoup import BeautifulStoneSoup
        parsed = BeautifulStoneSoup(fh)
    return parsed
constant[ Takes a file handler returns BeautifulSoup]
<ast.Try object at 0x7da18ede4e50>
return[name[soup]] | keyword[def] identifier[soup_maker] ( identifier[fh] ):
literal[string]
keyword[try] :
keyword[from] identifier[bs4] keyword[import] identifier[BeautifulSoup]
identifier[soup] = identifier[BeautifulSoup] ( identifier[fh] , literal[string] )
keyword[for] identifier[tag] keyword[in] identifier[soup] . identifier[find_all] ():
identifier[tag] . identifier[name] = identifier[tag] . identifier[name] . identifier[lower] ()
keyword[except] identifier[ImportError] :
keyword[from] identifier[BeautifulSoup] keyword[import] identifier[BeautifulStoneSoup]
identifier[soup] = identifier[BeautifulStoneSoup] ( identifier[fh] )
keyword[return] identifier[soup] | def soup_maker(fh):
""" Takes a file handler returns BeautifulSoup"""
try:
from bs4 import BeautifulSoup
soup = BeautifulSoup(fh, 'lxml')
for tag in soup.find_all():
tag.name = tag.name.lower() # depends on [control=['for'], data=['tag']] # depends on [control=['try'], data=[]]
except ImportError:
from BeautifulSoup import BeautifulStoneSoup
soup = BeautifulStoneSoup(fh) # depends on [control=['except'], data=[]]
return soup |
def status_code(code):
    """Returns response object of given status code."""
    # All plain redirects share one override dict.
    redirect = dict(headers=dict(location=REDIRECT_LOCATION))
    code_map = {status: redirect for status in (301, 302, 303, 305, 307)}
    code_map[304] = dict(data='')
    code_map[401] = dict(headers={'WWW-Authenticate': 'Basic realm="Fake Realm"'})
    code_map[402] = dict(
        data='Fuck you, pay me!',
        headers={'x-more-info': 'http://vimeo.com/22053820'},
    )
    code_map[406] = dict(
        data=json.dumps({
            'message': 'Client did not request a supported media type.',
            'accept': ACCEPTED_MEDIA_TYPES,
        }),
        headers={'Content-Type': 'application/json'},
    )
    code_map[407] = dict(headers={'Proxy-Authenticate': 'Basic realm="Fake Realm"'})
    # 418: I'm a teapot!
    code_map[418] = dict(
        data=ASCII_ART,
        headers={'x-more-info': 'http://tools.ietf.org/html/rfc2324'},
    )

    r = make_response()
    r.status_code = code
    overrides = code_map.get(code)
    if overrides is not None:
        if 'data' in overrides:
            r.data = overrides['data']
        if 'headers' in overrides:
            r.headers = overrides['headers']
    return r
constant[Returns response object of given status code.]
variable[redirect] assign[=] call[name[dict], parameter[]]
variable[code_map] assign[=] dictionary[[<ast.Constant object at 0x7da1b21babc0>, <ast.Constant object at 0x7da1b21ba6b0>, <ast.Constant object at 0x7da1b21bab60>, <ast.Constant object at 0x7da1b21b8280>, <ast.Constant object at 0x7da1b21b9d80>, <ast.Constant object at 0x7da1b21b9b70>, <ast.Constant object at 0x7da1b21b8eb0>, <ast.Constant object at 0x7da1b21ba920>, <ast.Constant object at 0x7da1b21bbe50>, <ast.Constant object at 0x7da1b21b8f70>, <ast.Constant object at 0x7da1b21bb730>], [<ast.Name object at 0x7da1b21b8f10>, <ast.Name object at 0x7da1b21b8910>, <ast.Name object at 0x7da1b21b9720>, <ast.Call object at 0x7da1b21badd0>, <ast.Name object at 0x7da1b21b88b0>, <ast.Name object at 0x7da1b21b9630>, <ast.Call object at 0x7da1b21bb460>, <ast.Call object at 0x7da1b21b93f0>, <ast.Call object at 0x7da1b21b80a0>, <ast.Call object at 0x7da1b21bbb50>, <ast.Call object at 0x7da1b21b9870>]]
variable[r] assign[=] call[name[make_response], parameter[]]
name[r].status_code assign[=] name[code]
if compare[name[code] in name[code_map]] begin[:]
variable[m] assign[=] call[name[code_map]][name[code]]
if compare[constant[data] in name[m]] begin[:]
name[r].data assign[=] call[name[m]][constant[data]]
if compare[constant[headers] in name[m]] begin[:]
name[r].headers assign[=] call[name[m]][constant[headers]]
return[name[r]] | keyword[def] identifier[status_code] ( identifier[code] ):
literal[string]
identifier[redirect] = identifier[dict] ( identifier[headers] = identifier[dict] ( identifier[location] = identifier[REDIRECT_LOCATION] ))
identifier[code_map] ={
literal[int] : identifier[redirect] ,
literal[int] : identifier[redirect] ,
literal[int] : identifier[redirect] ,
literal[int] : identifier[dict] ( identifier[data] = literal[string] ),
literal[int] : identifier[redirect] ,
literal[int] : identifier[redirect] ,
literal[int] : identifier[dict] ( identifier[headers] ={ literal[string] : literal[string] }),
literal[int] : identifier[dict] (
identifier[data] = literal[string] ,
identifier[headers] ={
literal[string] : literal[string]
}
),
literal[int] : identifier[dict] ( identifier[data] = identifier[json] . identifier[dumps] ({
literal[string] : literal[string] ,
literal[string] : identifier[ACCEPTED_MEDIA_TYPES]
}),
identifier[headers] ={
literal[string] : literal[string]
}),
literal[int] : identifier[dict] ( identifier[headers] ={ literal[string] : literal[string] }),
literal[int] : identifier[dict] (
identifier[data] = identifier[ASCII_ART] ,
identifier[headers] ={
literal[string] : literal[string]
}
),
}
identifier[r] = identifier[make_response] ()
identifier[r] . identifier[status_code] = identifier[code]
keyword[if] identifier[code] keyword[in] identifier[code_map] :
identifier[m] = identifier[code_map] [ identifier[code] ]
keyword[if] literal[string] keyword[in] identifier[m] :
identifier[r] . identifier[data] = identifier[m] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[m] :
identifier[r] . identifier[headers] = identifier[m] [ literal[string] ]
keyword[return] identifier[r] | def status_code(code):
"""Returns response object of given status code."""
redirect = dict(headers=dict(location=REDIRECT_LOCATION)) # I'm a teapot!
code_map = {301: redirect, 302: redirect, 303: redirect, 304: dict(data=''), 305: redirect, 307: redirect, 401: dict(headers={'WWW-Authenticate': 'Basic realm="Fake Realm"'}), 402: dict(data='Fuck you, pay me!', headers={'x-more-info': 'http://vimeo.com/22053820'}), 406: dict(data=json.dumps({'message': 'Client did not request a supported media type.', 'accept': ACCEPTED_MEDIA_TYPES}), headers={'Content-Type': 'application/json'}), 407: dict(headers={'Proxy-Authenticate': 'Basic realm="Fake Realm"'}), 418: dict(data=ASCII_ART, headers={'x-more-info': 'http://tools.ietf.org/html/rfc2324'})}
r = make_response()
r.status_code = code
if code in code_map:
m = code_map[code]
if 'data' in m:
r.data = m['data'] # depends on [control=['if'], data=['m']]
if 'headers' in m:
r.headers = m['headers'] # depends on [control=['if'], data=['m']] # depends on [control=['if'], data=['code', 'code_map']]
return r |
def db_en004(self, value=None):
    """ Corresponds to IDD Field `db_en004`
    mean coincident dry-bulb temperature to
    Enthalpy corresponding to 0.4% annual cumulative frequency of occurrence

    Args:
        value (float): value for IDD Field `db_en004`
            Unit: C
            if `value` is None it will not be checked against the
            specification and is assumed to be a missing value

    Raises:
        ValueError: if `value` is not a valid value
    """
    if value is not None:
        try:
            value = float(value)
        except (ValueError, TypeError):
            # float() raises TypeError for non-numeric types (e.g. list,
            # dict); normalise to ValueError so the documented contract
            # ("Raises: ValueError") holds for every invalid input.
            raise ValueError('value {} need to be of type float '
                             'for field `db_en004`'.format(value))
    self._db_en004 = value
constant[ Corresponds to IDD Field `db_en004`
mean coincident dry-bulb temperature to
Enthalpy corresponding to 0.4% annual cumulative frequency of occurrence
Args:
value (float): value for IDD Field `db_en004`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
]
if compare[name[value] is_not constant[None]] begin[:]
<ast.Try object at 0x7da1b0fec760>
name[self]._db_en004 assign[=] name[value] | keyword[def] identifier[db_en004] ( identifier[self] , identifier[value] = keyword[None] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[value] = identifier[float] ( identifier[value] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[value] ))
identifier[self] . identifier[_db_en004] = identifier[value] | def db_en004(self, value=None):
""" Corresponds to IDD Field `db_en004`
mean coincident dry-bulb temperature to
Enthalpy corresponding to 0.4% annual cumulative frequency of occurrence
Args:
value (float): value for IDD Field `db_en004`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = float(value) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('value {} need to be of type float for field `db_en004`'.format(value)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['value']]
self._db_en004 = value |
def create_answer(self, answer_form):
    """Creates a new ``Answer``.
    arg: answer_form (osid.assessment.AnswerForm): the form for
    this ``Answer``
    return: (osid.assessment.Answer) - the new ``Answer``
    raise: IllegalState - ``answer_form`` already used in a create
    transaction
    raise: InvalidArgument - one or more of the form elements is
    invalid
    raise: NullArgument - ``answer_form`` is ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure occurred
    raise: Unsupported - ``answer_form`` did not originate from
    ``get_answer_form_for_create()``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.repository.AssetAdminSession.create_asset_content_template
    from dlkit.abstract_osid.assessment.objects import AnswerForm as ABCAnswerForm
    collection = JSONClientValidated('assessment',
                                     collection='Item',
                                     runtime=self._runtime)
    # Reject forms of the wrong type, or forms minted for update rather
    # than create.
    if not isinstance(answer_form, ABCAnswerForm):
        raise errors.InvalidArgument('argument type is not an AnswerForm')
    if answer_form.is_for_update():
        raise errors.InvalidArgument('the AnswerForm is for update only, not create')
    # self._forms tracks every form handed out by this session: a KeyError
    # means the form came from somewhere else; CREATED means it was
    # already spent in an earlier create transaction.
    try:
        if self._forms[answer_form.get_id().get_identifier()] == CREATED:
            raise errors.IllegalState('answer_form already used in a create transaction')
    except KeyError:
        raise errors.Unsupported('answer_form did not originate from this session')
    if not answer_form.is_valid():
        raise errors.InvalidArgument('one or more of the form elements is invalid')
    # Answers live as embedded documents on their parent Item: assign this
    # one a fresh ObjectId, locate the parent Item (scoped to this
    # catalog), append the answer map, and persist the whole Item.
    answer_form._my_map['_id'] = ObjectId()
    item_id = Id(answer_form._my_map['itemId']).get_identifier()
    item = collection.find_one(
        {'$and': [{'_id': ObjectId(item_id)},
                  {'assigned' + self._catalog_name + 'Ids': {'$in': [str(self._catalog_id)]}}]})
    item['answers'].append(answer_form._my_map)
    result = collection.save(item)
    # Mark the form as consumed so it cannot be reused for another create.
    self._forms[answer_form.get_id().get_identifier()] = CREATED
    from .objects import Answer
    return Answer(
        osid_object_map=answer_form._my_map,
        runtime=self._runtime,
        proxy=self._proxy)
constant[Creates a new ``Answer``.
arg: answer_form (osid.assessment.AnswerForm): the form for
this ``Answer``
return: (osid.assessment.Answer) - the new ``Answer``
raise: IllegalState - ``answer_form`` already used in a create
transaction
raise: InvalidArgument - one or more of the form elements is
invalid
raise: NullArgument - ``answer_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
raise: Unsupported - ``answer_form`` did not originate from
``get_answer_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
]
from relative_module[dlkit.abstract_osid.assessment.objects] import module[AnswerForm]
variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[assessment]]]
if <ast.UnaryOp object at 0x7da204621960> begin[:]
<ast.Raise object at 0x7da2046236a0>
if call[name[answer_form].is_for_update, parameter[]] begin[:]
<ast.Raise object at 0x7da20c7cbc10>
<ast.Try object at 0x7da20c7c95a0>
if <ast.UnaryOp object at 0x7da20e9540d0> begin[:]
<ast.Raise object at 0x7da20e955210>
call[name[answer_form]._my_map][constant[_id]] assign[=] call[name[ObjectId], parameter[]]
variable[item_id] assign[=] call[call[name[Id], parameter[call[name[answer_form]._my_map][constant[itemId]]]].get_identifier, parameter[]]
variable[item] assign[=] call[name[collection].find_one, parameter[dictionary[[<ast.Constant object at 0x7da20e9567a0>], [<ast.List object at 0x7da20e957580>]]]]
call[call[name[item]][constant[answers]].append, parameter[name[answer_form]._my_map]]
variable[result] assign[=] call[name[collection].save, parameter[name[item]]]
call[name[self]._forms][call[call[name[answer_form].get_id, parameter[]].get_identifier, parameter[]]] assign[=] name[CREATED]
from relative_module[objects] import module[Answer]
return[call[name[Answer], parameter[]]] | keyword[def] identifier[create_answer] ( identifier[self] , identifier[answer_form] ):
literal[string]
keyword[from] identifier[dlkit] . identifier[abstract_osid] . identifier[assessment] . identifier[objects] keyword[import] identifier[AnswerForm] keyword[as] identifier[ABCAnswerForm]
identifier[collection] = identifier[JSONClientValidated] ( literal[string] ,
identifier[collection] = literal[string] ,
identifier[runtime] = identifier[self] . identifier[_runtime] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[answer_form] , identifier[ABCAnswerForm] ):
keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] )
keyword[if] identifier[answer_form] . identifier[is_for_update] ():
keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] )
keyword[try] :
keyword[if] identifier[self] . identifier[_forms] [ identifier[answer_form] . identifier[get_id] (). identifier[get_identifier] ()]== identifier[CREATED] :
keyword[raise] identifier[errors] . identifier[IllegalState] ( literal[string] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[errors] . identifier[Unsupported] ( literal[string] )
keyword[if] keyword[not] identifier[answer_form] . identifier[is_valid] ():
keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] )
identifier[answer_form] . identifier[_my_map] [ literal[string] ]= identifier[ObjectId] ()
identifier[item_id] = identifier[Id] ( identifier[answer_form] . identifier[_my_map] [ literal[string] ]). identifier[get_identifier] ()
identifier[item] = identifier[collection] . identifier[find_one] (
{ literal[string] :[{ literal[string] : identifier[ObjectId] ( identifier[item_id] )},
{ literal[string] + identifier[self] . identifier[_catalog_name] + literal[string] :{ literal[string] :[ identifier[str] ( identifier[self] . identifier[_catalog_id] )]}}]})
identifier[item] [ literal[string] ]. identifier[append] ( identifier[answer_form] . identifier[_my_map] )
identifier[result] = identifier[collection] . identifier[save] ( identifier[item] )
identifier[self] . identifier[_forms] [ identifier[answer_form] . identifier[get_id] (). identifier[get_identifier] ()]= identifier[CREATED]
keyword[from] . identifier[objects] keyword[import] identifier[Answer]
keyword[return] identifier[Answer] (
identifier[osid_object_map] = identifier[answer_form] . identifier[_my_map] ,
identifier[runtime] = identifier[self] . identifier[_runtime] ,
identifier[proxy] = identifier[self] . identifier[_proxy] ) | def create_answer(self, answer_form):
"""Creates a new ``Answer``.
arg: answer_form (osid.assessment.AnswerForm): the form for
this ``Answer``
return: (osid.assessment.Answer) - the new ``Answer``
raise: IllegalState - ``answer_form`` already used in a create
transaction
raise: InvalidArgument - one or more of the form elements is
invalid
raise: NullArgument - ``answer_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
raise: Unsupported - ``answer_form`` did not originate from
``get_answer_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.repository.AssetAdminSession.create_asset_content_template
from dlkit.abstract_osid.assessment.objects import AnswerForm as ABCAnswerForm
collection = JSONClientValidated('assessment', collection='Item', runtime=self._runtime)
if not isinstance(answer_form, ABCAnswerForm):
raise errors.InvalidArgument('argument type is not an AnswerForm') # depends on [control=['if'], data=[]]
if answer_form.is_for_update():
raise errors.InvalidArgument('the AnswerForm is for update only, not create') # depends on [control=['if'], data=[]]
try:
if self._forms[answer_form.get_id().get_identifier()] == CREATED:
raise errors.IllegalState('answer_form already used in a create transaction') # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyError:
raise errors.Unsupported('answer_form did not originate from this session') # depends on [control=['except'], data=[]]
if not answer_form.is_valid():
raise errors.InvalidArgument('one or more of the form elements is invalid') # depends on [control=['if'], data=[]]
answer_form._my_map['_id'] = ObjectId()
item_id = Id(answer_form._my_map['itemId']).get_identifier()
item = collection.find_one({'$and': [{'_id': ObjectId(item_id)}, {'assigned' + self._catalog_name + 'Ids': {'$in': [str(self._catalog_id)]}}]})
item['answers'].append(answer_form._my_map)
result = collection.save(item)
self._forms[answer_form.get_id().get_identifier()] = CREATED
from .objects import Answer
return Answer(osid_object_map=answer_form._my_map, runtime=self._runtime, proxy=self._proxy) |
def _POTUpdateBuilder(env, **kw):
    """Create the `POTUpdate` builder object for ``env``."""
    import SCons.Action
    from SCons.Tool.GettextCommon import _POTargetFactory
    # Fixed builder settings; these win over anything the caller passed in kw.
    kw.update(
        action=SCons.Action.Action(_update_pot_file, None),
        suffix='$POTSUFFIX',
        target_factory=_POTargetFactory(env, alias='$POTUPDATE_ALIAS').File,
        emitter=_pot_update_emitter,
    )
    return _POTBuilder(**kw)
constant[ Creates `POTUpdate` builder object ]
import module[SCons.Action]
from relative_module[SCons.Tool.GettextCommon] import module[_POTargetFactory]
call[name[kw]][constant[action]] assign[=] call[name[SCons].Action.Action, parameter[name[_update_pot_file], constant[None]]]
call[name[kw]][constant[suffix]] assign[=] constant[$POTSUFFIX]
call[name[kw]][constant[target_factory]] assign[=] call[name[_POTargetFactory], parameter[name[env]]].File
call[name[kw]][constant[emitter]] assign[=] name[_pot_update_emitter]
return[call[name[_POTBuilder], parameter[]]] | keyword[def] identifier[_POTUpdateBuilder] ( identifier[env] ,** identifier[kw] ):
literal[string]
keyword[import] identifier[SCons] . identifier[Action]
keyword[from] identifier[SCons] . identifier[Tool] . identifier[GettextCommon] keyword[import] identifier[_POTargetFactory]
identifier[kw] [ literal[string] ]= identifier[SCons] . identifier[Action] . identifier[Action] ( identifier[_update_pot_file] , keyword[None] )
identifier[kw] [ literal[string] ]= literal[string]
identifier[kw] [ literal[string] ]= identifier[_POTargetFactory] ( identifier[env] , identifier[alias] = literal[string] ). identifier[File]
identifier[kw] [ literal[string] ]= identifier[_pot_update_emitter]
keyword[return] identifier[_POTBuilder] (** identifier[kw] ) | def _POTUpdateBuilder(env, **kw):
""" Creates `POTUpdate` builder object """
import SCons.Action
from SCons.Tool.GettextCommon import _POTargetFactory
kw['action'] = SCons.Action.Action(_update_pot_file, None)
kw['suffix'] = '$POTSUFFIX'
kw['target_factory'] = _POTargetFactory(env, alias='$POTUPDATE_ALIAS').File
kw['emitter'] = _pot_update_emitter
return _POTBuilder(**kw) |
def __enableProxy(self):
    """
    Set the required environment variables to enable the use of hoverfly as a proxy.
    """
    # Route plain and TLS traffic through the hoverfly proxy endpoints.
    os.environ["HTTP_PROXY"] = self.httpProxy()
    os.environ["HTTPS_PROXY"] = self.httpsProxy()
    # Trust the certificate shipped alongside this module so the
    # proxy's TLS interception is accepted by `requests`.
    module_dir = os.path.dirname(os.path.abspath(__file__))
    os.environ["REQUESTS_CA_BUNDLE"] = os.path.join(module_dir, "cert.pem")
constant[
Set the required environment variables to enable the use of hoverfly as a proxy.
]
call[name[os].environ][constant[HTTP_PROXY]] assign[=] call[name[self].httpProxy, parameter[]]
call[name[os].environ][constant[HTTPS_PROXY]] assign[=] call[name[self].httpsProxy, parameter[]]
call[name[os].environ][constant[REQUESTS_CA_BUNDLE]] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[call[name[os].path.abspath, parameter[name[__file__]]]]], constant[cert.pem]]] | keyword[def] identifier[__enableProxy] ( identifier[self] ):
literal[string]
identifier[os] . identifier[environ] [
literal[string] ]= identifier[self] . identifier[httpProxy] ()
identifier[os] . identifier[environ] [
literal[string] ]= identifier[self] . identifier[httpsProxy] ()
identifier[os] . identifier[environ] [ literal[string] ]= identifier[os] . identifier[path] . identifier[join] (
identifier[os] . identifier[path] . identifier[dirname] (
identifier[os] . identifier[path] . identifier[abspath] ( identifier[__file__] )),
literal[string] ) | def __enableProxy(self):
"""
Set the required environment variables to enable the use of hoverfly as a proxy.
"""
os.environ['HTTP_PROXY'] = self.httpProxy()
os.environ['HTTPS_PROXY'] = self.httpsProxy()
os.environ['REQUESTS_CA_BUNDLE'] = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'cert.pem') |
def split_type(cls, type_name):
    """Split type of a type name with CardinalityField suffix into its parts.
    :param type_name: Type name (as string).
    :return: Tuple (type_basename, cardinality)
    """
    if not cls.matches_type(type_name):
        # -- ASSUME: Cardinality.one when no cardinality suffix is present.
        return (type_name, Cardinality.one)
    # Last character is the cardinality marker; the rest is the base name.
    suffix_char = type_name[-1]
    return (type_name[:-1], cls.from_char_map[suffix_char])
constant[Split type of a type name with CardinalityField suffix into its parts.
:param type_name: Type name (as string).
:return: Tuple (type_basename, cardinality)
]
if call[name[cls].matches_type, parameter[name[type_name]]] begin[:]
variable[basename] assign[=] call[name[type_name]][<ast.Slice object at 0x7da1b2458eb0>]
variable[cardinality] assign[=] call[name[cls].from_char_map][call[name[type_name]][<ast.UnaryOp object at 0x7da1b2459a20>]]
return[tuple[[<ast.Name object at 0x7da1b245b0d0>, <ast.Name object at 0x7da1b245aec0>]]] | keyword[def] identifier[split_type] ( identifier[cls] , identifier[type_name] ):
literal[string]
keyword[if] identifier[cls] . identifier[matches_type] ( identifier[type_name] ):
identifier[basename] = identifier[type_name] [:- literal[int] ]
identifier[cardinality] = identifier[cls] . identifier[from_char_map] [ identifier[type_name] [- literal[int] ]]
keyword[else] :
identifier[cardinality] = identifier[Cardinality] . identifier[one]
identifier[basename] = identifier[type_name]
keyword[return] ( identifier[basename] , identifier[cardinality] ) | def split_type(cls, type_name):
"""Split type of a type name with CardinalityField suffix into its parts.
:param type_name: Type name (as string).
:return: Tuple (type_basename, cardinality)
"""
if cls.matches_type(type_name):
basename = type_name[:-1]
cardinality = cls.from_char_map[type_name[-1]] # depends on [control=['if'], data=[]]
else:
# -- ASSUME: Cardinality.one
cardinality = Cardinality.one
basename = type_name
return (basename, cardinality) |
def before_render(self):
    """Before template render hook

    Narrows the listing's catalog query to the current client's folder
    when the logged-in user is a client contact, and drops the then
    redundant "Client" column.
    """
    # If the current user is a client contact, display those analysis
    # requests that belong to same client only
    super(AnalysisRequestsView, self).before_render()
    client = api.get_current_client()
    if client:
        # Anchor the catalog path query at the client's folder.
        # NOTE(review): "level" is the path-index depth parameter; confirm
        # that 0 matches the whole client subtree in this catalog setup.
        self.contentFilter['path'] = {
            "query": "/".join(client.getPhysicalPath()),
            "level": 0}
        # No need to display the Client column
        self.remove_column('Client')
constant[Before template render hook
]
call[call[name[super], parameter[name[AnalysisRequestsView], name[self]]].before_render, parameter[]]
variable[client] assign[=] call[name[api].get_current_client, parameter[]]
if name[client] begin[:]
call[name[self].contentFilter][constant[path]] assign[=] dictionary[[<ast.Constant object at 0x7da1b1d38d60>, <ast.Constant object at 0x7da1b1d3ab00>], [<ast.Call object at 0x7da1b1d3aaa0>, <ast.Constant object at 0x7da1b1d3a2f0>]]
call[name[self].remove_column, parameter[constant[Client]]] | keyword[def] identifier[before_render] ( identifier[self] ):
literal[string]
identifier[super] ( identifier[AnalysisRequestsView] , identifier[self] ). identifier[before_render] ()
identifier[client] = identifier[api] . identifier[get_current_client] ()
keyword[if] identifier[client] :
identifier[self] . identifier[contentFilter] [ literal[string] ]={
literal[string] : literal[string] . identifier[join] ( identifier[client] . identifier[getPhysicalPath] ()),
literal[string] : literal[int] }
identifier[self] . identifier[remove_column] ( literal[string] ) | def before_render(self):
"""Before template render hook
"""
# If the current user is a client contact, display those analysis
# requests that belong to same client only
super(AnalysisRequestsView, self).before_render()
client = api.get_current_client()
if client:
self.contentFilter['path'] = {'query': '/'.join(client.getPhysicalPath()), 'level': 0}
# No need to display the Client column
self.remove_column('Client') # depends on [control=['if'], data=[]] |
def _cell_to_python(cell):
    """Convert a PyOpenXL's `Cell` object to the corresponding Python object."""
    data_type = cell.data_type
    value = cell.value
    # Empty cells carry no value at all.
    if type(cell) is EmptyCell:
        return None
    # Formula cells encoding the boolean literals.
    if data_type == "f":
        if value == "=TRUE()":
            return True
        if value == "=FALSE()":
            return False
    fmt = cell.number_format
    # Date / datetime formats: normalise to the bare ISO string.
    if fmt.lower() == "yyyy-mm-dd":
        return str(value).split(" 00:00:00")[0]
    if fmt.lower() == "yyyy-mm-dd hh:mm:ss":
        return str(value).split(".")[0]
    # Percentage-formatted numbers: render via Decimal to avoid float noise.
    if fmt.endswith("%") and isinstance(value, Number):
        return "{:%}".format(Decimal(str(value)))
    if value is None:
        return ""
    return value
constant[Convert a PyOpenXL's `Cell` object to the corresponding Python object.]
<ast.Tuple object at 0x7da18bc71270> assign[=] tuple[[<ast.Attribute object at 0x7da18bc72500>, <ast.Attribute object at 0x7da18bc73dc0>]]
if compare[call[name[type], parameter[name[cell]]] is name[EmptyCell]] begin[:]
return[constant[None]] | keyword[def] identifier[_cell_to_python] ( identifier[cell] ):
literal[string]
identifier[data_type] , identifier[value] = identifier[cell] . identifier[data_type] , identifier[cell] . identifier[value]
keyword[if] identifier[type] ( identifier[cell] ) keyword[is] identifier[EmptyCell] :
keyword[return] keyword[None]
keyword[elif] identifier[data_type] == literal[string] keyword[and] identifier[value] == literal[string] :
keyword[return] keyword[True]
keyword[elif] identifier[data_type] == literal[string] keyword[and] identifier[value] == literal[string] :
keyword[return] keyword[False]
keyword[elif] identifier[cell] . identifier[number_format] . identifier[lower] ()== literal[string] :
keyword[return] identifier[str] ( identifier[value] ). identifier[split] ( literal[string] )[ literal[int] ]
keyword[elif] identifier[cell] . identifier[number_format] . identifier[lower] ()== literal[string] :
keyword[return] identifier[str] ( identifier[value] ). identifier[split] ( literal[string] )[ literal[int] ]
keyword[elif] identifier[cell] . identifier[number_format] . identifier[endswith] ( literal[string] ) keyword[and] identifier[isinstance] ( identifier[value] , identifier[Number] ):
identifier[value] = identifier[Decimal] ( identifier[str] ( identifier[value] ))
keyword[return] literal[string] . identifier[format] ( identifier[value] )
keyword[elif] identifier[value] keyword[is] keyword[None] :
keyword[return] literal[string]
keyword[else] :
keyword[return] identifier[value] | def _cell_to_python(cell):
"""Convert a PyOpenXL's `Cell` object to the corresponding Python object."""
(data_type, value) = (cell.data_type, cell.value)
if type(cell) is EmptyCell:
return None # depends on [control=['if'], data=[]]
elif data_type == 'f' and value == '=TRUE()':
return True # depends on [control=['if'], data=[]]
elif data_type == 'f' and value == '=FALSE()':
return False # depends on [control=['if'], data=[]]
elif cell.number_format.lower() == 'yyyy-mm-dd':
return str(value).split(' 00:00:00')[0] # depends on [control=['if'], data=[]]
elif cell.number_format.lower() == 'yyyy-mm-dd hh:mm:ss':
return str(value).split('.')[0] # depends on [control=['if'], data=[]]
elif cell.number_format.endswith('%') and isinstance(value, Number):
value = Decimal(str(value))
return '{:%}'.format(value) # depends on [control=['if'], data=[]]
elif value is None:
return '' # depends on [control=['if'], data=[]]
else:
return value |
def prepare(cls, session, pkgen, items):
    """This is called immediately before the items are written to the
    database. pkgen is passed in to allow last-minute resolving of ids.

    Yields one dict per merged item, ready for insertion.
    """
    # Only the results of cls.merge are prepared -- merge may combine or
    # filter the incoming items (semantics defined on the class).
    for item in cls.merge(session, items):
        if hasattr(item, "id"):
            # Allocate a fresh primary key for this class and flag the
            # item as newly created.
            item.id.resolve(id=pkgen.get(cls), is_new=True)
        yield cls.to_dict(item)
constant[This is called immediately before the items are written to the
database. pkgen is passed in to allow last-minute resolving of ids.
]
for taget[name[item]] in starred[call[name[cls].merge, parameter[name[session], name[items]]]] begin[:]
if call[name[hasattr], parameter[name[item], constant[id]]] begin[:]
call[name[item].id.resolve, parameter[]]
<ast.Yield object at 0x7da1b1b622c0> | keyword[def] identifier[prepare] ( identifier[cls] , identifier[session] , identifier[pkgen] , identifier[items] ):
literal[string]
keyword[for] identifier[item] keyword[in] identifier[cls] . identifier[merge] ( identifier[session] , identifier[items] ):
keyword[if] identifier[hasattr] ( identifier[item] , literal[string] ):
identifier[item] . identifier[id] . identifier[resolve] ( identifier[id] = identifier[pkgen] . identifier[get] ( identifier[cls] ), identifier[is_new] = keyword[True] )
keyword[yield] identifier[cls] . identifier[to_dict] ( identifier[item] ) | def prepare(cls, session, pkgen, items):
"""This is called immediately before the items are written to the
database. pkgen is passed in to allow last-minute resolving of ids.
"""
for item in cls.merge(session, items):
if hasattr(item, 'id'):
item.id.resolve(id=pkgen.get(cls), is_new=True) # depends on [control=['if'], data=[]]
yield cls.to_dict(item) # depends on [control=['for'], data=['item']] |
def addPathVariables(self, pathvars):
    """ Adds path variables to the pathvars map property

    :param pathvars: Mapping of path-variable names to values. Anything
        that is not a dict is silently ignored (preserving the original
        best-effort behavior).
    """
    # isinstance (rather than an exact ``type(...) is dict`` check) also
    # accepts dict subclasses such as OrderedDict -- a backward-compatible
    # generalization.
    if isinstance(pathvars, dict):
        self._pathvars = merge(self._pathvars, pathvars)
constant[ Adds path variables to the pathvars map property]
if compare[call[name[type], parameter[name[pathvars]]] is name[dict]] begin[:]
name[self]._pathvars assign[=] call[name[merge], parameter[name[self]._pathvars, name[pathvars]]] | keyword[def] identifier[addPathVariables] ( identifier[self] , identifier[pathvars] ):
literal[string]
keyword[if] identifier[type] ( identifier[pathvars] ) keyword[is] identifier[dict] :
identifier[self] . identifier[_pathvars] = identifier[merge] ( identifier[self] . identifier[_pathvars] , identifier[pathvars] ) | def addPathVariables(self, pathvars):
""" Adds path variables to the pathvars map property"""
if type(pathvars) is dict:
self._pathvars = merge(self._pathvars, pathvars) # depends on [control=['if'], data=[]] |
def _round_robin_write(writers, generator):
  """Write records from generator round-robin across writers.

  Args:
    writers: Sequence of record writers (objects exposing a ``write`` method).
    generator: Iterable of serialized examples to distribute.
  """
  # tqdm only wraps the iteration with a progress bar; leave=False clears
  # the bar once the loop finishes. Example i goes to writer i mod N.
  for i, example in enumerate(utils.tqdm(
      generator, unit=" examples", leave=False)):
    writers[i % len(writers)].write(example)
constant[Write records from generator round-robin across writers.]
for taget[tuple[[<ast.Name object at 0x7da1b20631f0>, <ast.Name object at 0x7da1b2061a80>]]] in starred[call[name[enumerate], parameter[call[name[utils].tqdm, parameter[name[generator]]]]]] begin[:]
call[call[name[writers]][binary_operation[name[i] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[writers]]]]].write, parameter[name[example]]] | keyword[def] identifier[_round_robin_write] ( identifier[writers] , identifier[generator] ):
literal[string]
keyword[for] identifier[i] , identifier[example] keyword[in] identifier[enumerate] ( identifier[utils] . identifier[tqdm] (
identifier[generator] , identifier[unit] = literal[string] , identifier[leave] = keyword[False] )):
identifier[writers] [ identifier[i] % identifier[len] ( identifier[writers] )]. identifier[write] ( identifier[example] ) | def _round_robin_write(writers, generator):
"""Write records from generator round-robin across writers."""
for (i, example) in enumerate(utils.tqdm(generator, unit=' examples', leave=False)):
writers[i % len(writers)].write(example) # depends on [control=['for'], data=[]] |
def handle_stream(msg):
    """Process a stream type message.

    Returns the handler's result, or None when the message does not come
    from a sensor node or no handler is registered for its sub type.
    """
    gateway = msg.gateway
    if gateway.is_sensor(msg.node_id):
        stream = gateway.const.Stream(msg.sub_type)
        handler = stream.get_handler(gateway.handlers)
        if handler is not None:
            return handler(msg)
    return None
constant[Process a stream type message.]
if <ast.UnaryOp object at 0x7da20e9540a0> begin[:]
return[constant[None]]
variable[stream] assign[=] call[name[msg].gateway.const.Stream, parameter[name[msg].sub_type]]
variable[handler] assign[=] call[name[stream].get_handler, parameter[name[msg].gateway.handlers]]
if compare[name[handler] is constant[None]] begin[:]
return[constant[None]]
return[call[name[handler], parameter[name[msg]]]] | keyword[def] identifier[handle_stream] ( identifier[msg] ):
literal[string]
keyword[if] keyword[not] identifier[msg] . identifier[gateway] . identifier[is_sensor] ( identifier[msg] . identifier[node_id] ):
keyword[return] keyword[None]
identifier[stream] = identifier[msg] . identifier[gateway] . identifier[const] . identifier[Stream] ( identifier[msg] . identifier[sub_type] )
identifier[handler] = identifier[stream] . identifier[get_handler] ( identifier[msg] . identifier[gateway] . identifier[handlers] )
keyword[if] identifier[handler] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[handler] ( identifier[msg] ) | def handle_stream(msg):
"""Process a stream type message."""
if not msg.gateway.is_sensor(msg.node_id):
return None # depends on [control=['if'], data=[]]
stream = msg.gateway.const.Stream(msg.sub_type)
handler = stream.get_handler(msg.gateway.handlers)
if handler is None:
return None # depends on [control=['if'], data=[]]
return handler(msg) |
def update_loggedoutflow_v1(self):
    """Log a new entry of discharge at a cross section far downstream.

    Required control parameter:
      |NmbLogEntries|

    Required flux sequence:
      |Outflow|

    Calculated flux sequence:
      |LoggedOutflow|

    Example:

        The following example shows that, with each new method call, the
        three memorized values are successively moved to the right and the
        respective new value is stored on the bare left position:

        >>> from hydpy.models.dam import *
        >>> parameterstep()
        >>> nmblogentries(3)
        >>> logs.loggedoutflow = 0.0
        >>> from hydpy import UnitTest
        >>> test = UnitTest(model, model.update_loggedoutflow_v1,
        ...                 last_example=4,
        ...                 parseqs=(fluxes.outflow,
        ...                          logs.loggedoutflow))
        >>> test.nexts.outflow = [1.0, 3.0, 2.0, 4.0]
        >>> del test.inits.loggedoutflow
        >>> test()
        | ex. | outflow |   loggedoutflow |
        -----------------------------------
        |   1 |     1.0 | 1.0   0.0   0.0 |
        |   2 |     3.0 | 3.0   1.0   0.0 |
        |   3 |     2.0 | 2.0   3.0   1.0 |
        |   4 |     4.0 | 4.0   2.0   3.0 |
    """
    con = self.parameters.control.fastaccess
    flu = self.sequences.fluxes.fastaccess
    log = self.sequences.logs.fastaccess
    # Shift all log entries one position to the right; the oldest value
    # (index nmblogentries-1) drops out.
    for idx in range(con.nmblogentries-1, 0, -1):
        log.loggedoutflow[idx] = log.loggedoutflow[idx-1]
    # Store the newest outflow value at the front of the log.
    log.loggedoutflow[0] = flu.outflow
constant[Log a new entry of discharge at a cross section far downstream.
Required control parameter:
|NmbLogEntries|
Required flux sequence:
|Outflow|
Calculated flux sequence:
|LoggedOutflow|
Example:
The following example shows that, with each new method call, the
three memorized values are successively moved to the right and the
respective new value is stored on the bare left position:
>>> from hydpy.models.dam import *
>>> parameterstep()
>>> nmblogentries(3)
>>> logs.loggedoutflow = 0.0
>>> from hydpy import UnitTest
>>> test = UnitTest(model, model.update_loggedoutflow_v1,
... last_example=4,
... parseqs=(fluxes.outflow,
... logs.loggedoutflow))
>>> test.nexts.outflow = [1.0, 3.0, 2.0, 4.0]
>>> del test.inits.loggedoutflow
>>> test()
| ex. | outflow | loggedoutflow |
-------------------------------------------
| 1 | 1.0 | 1.0 0.0 0.0 |
| 2 | 3.0 | 3.0 1.0 0.0 |
| 3 | 2.0 | 2.0 3.0 1.0 |
| 4 | 4.0 | 4.0 2.0 3.0 |
]
variable[con] assign[=] name[self].parameters.control.fastaccess
variable[flu] assign[=] name[self].sequences.fluxes.fastaccess
variable[log] assign[=] name[self].sequences.logs.fastaccess
for taget[name[idx]] in starred[call[name[range], parameter[binary_operation[name[con].nmblogentries - constant[1]], constant[0], <ast.UnaryOp object at 0x7da2041d9750>]]] begin[:]
call[name[log].loggedoutflow][name[idx]] assign[=] call[name[log].loggedoutflow][binary_operation[name[idx] - constant[1]]]
call[name[log].loggedoutflow][constant[0]] assign[=] name[flu].outflow | keyword[def] identifier[update_loggedoutflow_v1] ( identifier[self] ):
literal[string]
identifier[con] = identifier[self] . identifier[parameters] . identifier[control] . identifier[fastaccess]
identifier[flu] = identifier[self] . identifier[sequences] . identifier[fluxes] . identifier[fastaccess]
identifier[log] = identifier[self] . identifier[sequences] . identifier[logs] . identifier[fastaccess]
keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[con] . identifier[nmblogentries] - literal[int] , literal[int] ,- literal[int] ):
identifier[log] . identifier[loggedoutflow] [ identifier[idx] ]= identifier[log] . identifier[loggedoutflow] [ identifier[idx] - literal[int] ]
identifier[log] . identifier[loggedoutflow] [ literal[int] ]= identifier[flu] . identifier[outflow] | def update_loggedoutflow_v1(self):
"""Log a new entry of discharge at a cross section far downstream.
Required control parameter:
|NmbLogEntries|
Required flux sequence:
|Outflow|
Calculated flux sequence:
|LoggedOutflow|
Example:
The following example shows that, with each new method call, the
three memorized values are successively moved to the right and the
respective new value is stored on the bare left position:
>>> from hydpy.models.dam import *
>>> parameterstep()
>>> nmblogentries(3)
>>> logs.loggedoutflow = 0.0
>>> from hydpy import UnitTest
>>> test = UnitTest(model, model.update_loggedoutflow_v1,
... last_example=4,
... parseqs=(fluxes.outflow,
... logs.loggedoutflow))
>>> test.nexts.outflow = [1.0, 3.0, 2.0, 4.0]
>>> del test.inits.loggedoutflow
>>> test()
| ex. | outflow | loggedoutflow |
-------------------------------------------
| 1 | 1.0 | 1.0 0.0 0.0 |
| 2 | 3.0 | 3.0 1.0 0.0 |
| 3 | 2.0 | 2.0 3.0 1.0 |
| 4 | 4.0 | 4.0 2.0 3.0 |
"""
con = self.parameters.control.fastaccess
flu = self.sequences.fluxes.fastaccess
log = self.sequences.logs.fastaccess
for idx in range(con.nmblogentries - 1, 0, -1):
log.loggedoutflow[idx] = log.loggedoutflow[idx - 1] # depends on [control=['for'], data=['idx']]
log.loggedoutflow[0] = flu.outflow |
def get_path_from_query_string(req):
    """Gets path from query string
    Args:
        req (flask.request): Request object from Flask
    Returns:
        path (str): Value of "path" parameter from query string
    Raises:
        exceptions.UserError: If "path" is not found in query string
    """
    # Look the parameter up once instead of twice (the original queried
    # req.args both for the presence check and for the return value).
    path = req.args.get('path')
    if path is None:
        raise exceptions.UserError('Path not found in query string')
    return path
constant[Gets path from query string
Args:
req (flask.request): Request object from Flask
Returns:
path (str): Value of "path" parameter from query string
Raises:
exceptions.UserError: If "path" is not found in query string
]
if compare[call[name[req].args.get, parameter[constant[path]]] is constant[None]] begin[:]
<ast.Raise object at 0x7da18f721630>
return[call[name[req].args.get, parameter[constant[path]]]] | keyword[def] identifier[get_path_from_query_string] ( identifier[req] ):
literal[string]
keyword[if] identifier[req] . identifier[args] . identifier[get] ( literal[string] ) keyword[is] keyword[None] :
keyword[raise] identifier[exceptions] . identifier[UserError] ( literal[string] )
keyword[return] identifier[req] . identifier[args] . identifier[get] ( literal[string] ) | def get_path_from_query_string(req):
"""Gets path from query string
Args:
req (flask.request): Request object from Flask
Returns:
path (str): Value of "path" parameter from query string
Raises:
exceptions.UserError: If "path" is not found in query string
"""
if req.args.get('path') is None:
raise exceptions.UserError('Path not found in query string') # depends on [control=['if'], data=[]]
return req.args.get('path') |
def bar(self, serie, rescale=False):
    """Draw a bar graph for a serie.

    :param serie: serie whose ``points`` are (value, start, end) triples.
    :param rescale: passed through as ``secondary`` to the low-level bar
        routine (presumably draws against the secondary scale -- confirm).
    """
    serie_node = self.svg.serie(serie)
    bars = self.svg.node(serie_node['plot'], class_="histbars")
    points = serie.points
    for i, (y, x0, x1) in enumerate(points):
        # Skip points with any missing coordinate, and non-positive
        # values on a logarithmic scale (no finite position).
        if None in (x0, x1, y) or (self.logarithmic and y <= 0):
            continue
        metadata = serie.metadata.get(i)
        # decorate() attaches the per-point metadata to the new SVG node.
        bar = decorate(
            self.svg, self.svg.node(bars, class_='histbar'), metadata
        )
        val = self._format(serie, i)
        # _bar renders the rectangle and returns its bounds, which feed
        # the tooltip / value-printing helper below.
        bounds = self._bar(
            serie, bar, x0, x1, y, i, self.zero, secondary=rescale
        )
        self._tooltip_and_print_values(
            serie_node, serie, bar, i, val, metadata, *bounds
        )
constant[Draw a bar graph for a serie]
variable[serie_node] assign[=] call[name[self].svg.serie, parameter[name[serie]]]
variable[bars] assign[=] call[name[self].svg.node, parameter[call[name[serie_node]][constant[plot]]]]
variable[points] assign[=] name[serie].points
for taget[tuple[[<ast.Name object at 0x7da20c6ab280>, <ast.Tuple object at 0x7da20c6aa4a0>]]] in starred[call[name[enumerate], parameter[name[points]]]] begin[:]
if <ast.BoolOp object at 0x7da20c6a8e50> begin[:]
continue
variable[metadata] assign[=] call[name[serie].metadata.get, parameter[name[i]]]
variable[bar] assign[=] call[name[decorate], parameter[name[self].svg, call[name[self].svg.node, parameter[name[bars]]], name[metadata]]]
variable[val] assign[=] call[name[self]._format, parameter[name[serie], name[i]]]
variable[bounds] assign[=] call[name[self]._bar, parameter[name[serie], name[bar], name[x0], name[x1], name[y], name[i], name[self].zero]]
call[name[self]._tooltip_and_print_values, parameter[name[serie_node], name[serie], name[bar], name[i], name[val], name[metadata], <ast.Starred object at 0x7da20c6aa1d0>]] | keyword[def] identifier[bar] ( identifier[self] , identifier[serie] , identifier[rescale] = keyword[False] ):
literal[string]
identifier[serie_node] = identifier[self] . identifier[svg] . identifier[serie] ( identifier[serie] )
identifier[bars] = identifier[self] . identifier[svg] . identifier[node] ( identifier[serie_node] [ literal[string] ], identifier[class_] = literal[string] )
identifier[points] = identifier[serie] . identifier[points]
keyword[for] identifier[i] ,( identifier[y] , identifier[x0] , identifier[x1] ) keyword[in] identifier[enumerate] ( identifier[points] ):
keyword[if] keyword[None] keyword[in] ( identifier[x0] , identifier[x1] , identifier[y] ) keyword[or] ( identifier[self] . identifier[logarithmic] keyword[and] identifier[y] <= literal[int] ):
keyword[continue]
identifier[metadata] = identifier[serie] . identifier[metadata] . identifier[get] ( identifier[i] )
identifier[bar] = identifier[decorate] (
identifier[self] . identifier[svg] , identifier[self] . identifier[svg] . identifier[node] ( identifier[bars] , identifier[class_] = literal[string] ), identifier[metadata]
)
identifier[val] = identifier[self] . identifier[_format] ( identifier[serie] , identifier[i] )
identifier[bounds] = identifier[self] . identifier[_bar] (
identifier[serie] , identifier[bar] , identifier[x0] , identifier[x1] , identifier[y] , identifier[i] , identifier[self] . identifier[zero] , identifier[secondary] = identifier[rescale]
)
identifier[self] . identifier[_tooltip_and_print_values] (
identifier[serie_node] , identifier[serie] , identifier[bar] , identifier[i] , identifier[val] , identifier[metadata] ,* identifier[bounds]
) | def bar(self, serie, rescale=False):
"""Draw a bar graph for a serie"""
serie_node = self.svg.serie(serie)
bars = self.svg.node(serie_node['plot'], class_='histbars')
points = serie.points
for (i, (y, x0, x1)) in enumerate(points):
if None in (x0, x1, y) or (self.logarithmic and y <= 0):
continue # depends on [control=['if'], data=[]]
metadata = serie.metadata.get(i)
bar = decorate(self.svg, self.svg.node(bars, class_='histbar'), metadata)
val = self._format(serie, i)
bounds = self._bar(serie, bar, x0, x1, y, i, self.zero, secondary=rescale)
self._tooltip_and_print_values(serie_node, serie, bar, i, val, metadata, *bounds) # depends on [control=['for'], data=[]] |
def build_input_map(protomap, inputs):
  """Builds a map to feed tensors in `protomap` using `inputs`.
  Args:
    protomap: A proto map<string,TensorInfo>.
    inputs: A map with same keys as `protomap` of Tensors and SparseTensors.
  Returns:
    A map from nodes refered by TensorInfo protos to corresponding input
    tensors.
  Raises:
    ValueError: if a TensorInfo proto is malformed or map keys do not match.
  """
  if set(protomap) != set(inputs):
    raise ValueError("build_input_map: keys do not match.")
  result = {}
  for key, info in protomap.items():
    tensor = inputs[key]
    kind = info.WhichOneof("encoding")
    if kind == "name":
      # Dense tensor: a single node name maps to the input directly.
      result[info.name] = tensor
    elif kind == "coo_sparse":
      # Sparse tensor: feed its three component nodes separately.
      sparse = info.coo_sparse
      result[sparse.values_tensor_name] = tensor.values
      result[sparse.indices_tensor_name] = tensor.indices
      result[sparse.dense_shape_tensor_name] = tensor.dense_shape
    else:
      raise ValueError("Invalid TensorInfo.encoding: %s" % kind)
  return result
constant[Builds a map to feed tensors in `protomap` using `inputs`.
Args:
protomap: A proto map<string,TensorInfo>.
inputs: A map with same keys as `protomap` of Tensors and SparseTensors.
Returns:
A map from nodes refered by TensorInfo protos to corresponding input
tensors.
Raises:
ValueError: if a TensorInfo proto is malformed or map keys do not match.
]
if compare[call[name[set], parameter[call[name[protomap].keys, parameter[]]]] not_equal[!=] call[name[set], parameter[call[name[inputs].keys, parameter[]]]]] begin[:]
<ast.Raise object at 0x7da1b2010a30>
variable[input_map] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b2011720>, <ast.Name object at 0x7da1b1f1bc10>]]] in starred[call[name[protomap].items, parameter[]]] begin[:]
variable[arg] assign[=] call[name[inputs]][name[key]]
variable[encoding] assign[=] call[name[tensor_info].WhichOneof, parameter[constant[encoding]]]
if compare[name[encoding] equal[==] constant[name]] begin[:]
call[name[input_map]][name[tensor_info].name] assign[=] name[arg]
return[name[input_map]] | keyword[def] identifier[build_input_map] ( identifier[protomap] , identifier[inputs] ):
literal[string]
keyword[if] identifier[set] ( identifier[protomap] . identifier[keys] ())!= identifier[set] ( identifier[inputs] . identifier[keys] ()):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[input_map] ={}
keyword[for] identifier[key] , identifier[tensor_info] keyword[in] identifier[protomap] . identifier[items] ():
identifier[arg] = identifier[inputs] [ identifier[key] ]
identifier[encoding] = identifier[tensor_info] . identifier[WhichOneof] ( literal[string] )
keyword[if] identifier[encoding] == literal[string] :
identifier[input_map] [ identifier[tensor_info] . identifier[name] ]= identifier[arg]
keyword[elif] identifier[encoding] == literal[string] :
identifier[coo_sparse] = identifier[tensor_info] . identifier[coo_sparse]
identifier[input_map] [ identifier[coo_sparse] . identifier[values_tensor_name] ]= identifier[arg] . identifier[values]
identifier[input_map] [ identifier[coo_sparse] . identifier[indices_tensor_name] ]= identifier[arg] . identifier[indices]
identifier[input_map] [ identifier[coo_sparse] . identifier[dense_shape_tensor_name] ]= identifier[arg] . identifier[dense_shape]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[encoding] )
keyword[return] identifier[input_map] | def build_input_map(protomap, inputs):
"""Builds a map to feed tensors in `protomap` using `inputs`.
Args:
protomap: A proto map<string,TensorInfo>.
inputs: A map with same keys as `protomap` of Tensors and SparseTensors.
Returns:
A map from nodes refered by TensorInfo protos to corresponding input
tensors.
Raises:
ValueError: if a TensorInfo proto is malformed or map keys do not match.
"""
if set(protomap.keys()) != set(inputs.keys()):
raise ValueError('build_input_map: keys do not match.') # depends on [control=['if'], data=[]]
input_map = {}
for (key, tensor_info) in protomap.items():
arg = inputs[key]
encoding = tensor_info.WhichOneof('encoding')
if encoding == 'name':
input_map[tensor_info.name] = arg # depends on [control=['if'], data=[]]
elif encoding == 'coo_sparse':
coo_sparse = tensor_info.coo_sparse
input_map[coo_sparse.values_tensor_name] = arg.values
input_map[coo_sparse.indices_tensor_name] = arg.indices
input_map[coo_sparse.dense_shape_tensor_name] = arg.dense_shape # depends on [control=['if'], data=[]]
else:
raise ValueError('Invalid TensorInfo.encoding: %s' % encoding) # depends on [control=['for'], data=[]]
return input_map |
def render_core(url_prefix, auth_header, resources):
    """Generate res.core.js

    :param url_prefix: URL prefix baked into the generated ``init`` call.
    :param auth_header: Auth header name baked into the generated code.
    :param resources: Mapping of resource name -> {action: {'url', 'method'}}.
    :return: The generated JavaScript source as a single string.
    """
    # Collect fragments in a list and join once: repeated ``+=`` on a
    # string is quadratic in the number of actions.
    parts = ["function(root, init) {\n"]
    parts.append("  var q = init('%(auth_header)s', '%(url_prefix)s');\n" %
                 {'url_prefix': url_prefix, 'auth_header': auth_header})
    parts.append("  var r = null;\n")
    for key in resources:
        parts.append("  r = root.%(key)s = {};\n" % {'key': key})
        for action, item in resources[key].items():
            parts.append("    r.%(action)s = q('%(url)s', '%(method)s');\n" %
                         {'action': action,
                          'url': item['url'],
                          'method': item['method']})
    parts.append("}")
    return "".join(parts)
constant[Generate res.core.js]
variable[code] assign[=] constant[]
<ast.AugAssign object at 0x7da1b2662440>
<ast.AugAssign object at 0x7da1b2661870>
<ast.AugAssign object at 0x7da1b26619c0>
for taget[name[key]] in starred[name[resources]] begin[:]
<ast.AugAssign object at 0x7da1b26629e0>
for taget[tuple[[<ast.Name object at 0x7da1b2662b60>, <ast.Name object at 0x7da1b26616c0>]]] in starred[call[call[name[resources]][name[key]].items, parameter[]]] begin[:]
<ast.AugAssign object at 0x7da1b26626e0>
<ast.AugAssign object at 0x7da1b2658460>
return[name[code]] | keyword[def] identifier[render_core] ( identifier[url_prefix] , identifier[auth_header] , identifier[resources] ):
literal[string]
identifier[code] = literal[string]
identifier[code] += literal[string]
identifier[code] += literal[string] %{ literal[string] : identifier[url_prefix] , literal[string] : identifier[auth_header] }
identifier[code] += literal[string]
keyword[for] identifier[key] keyword[in] identifier[resources] :
identifier[code] += literal[string] %{ literal[string] : identifier[key] }
keyword[for] identifier[action] , identifier[item] keyword[in] identifier[resources] [ identifier[key] ]. identifier[items] ():
identifier[code] += literal[string] %{ literal[string] : identifier[action] ,
literal[string] : identifier[item] [ literal[string] ],
literal[string] : identifier[item] [ literal[string] ]}
identifier[code] += literal[string]
keyword[return] identifier[code] | def render_core(url_prefix, auth_header, resources):
"""Generate res.core.js"""
code = ''
code += 'function(root, init) {\n'
code += " var q = init('%(auth_header)s', '%(url_prefix)s');\n" % {'url_prefix': url_prefix, 'auth_header': auth_header}
code += ' var r = null;\n'
for key in resources:
code += ' r = root.%(key)s = {};\n' % {'key': key}
for (action, item) in resources[key].items():
code += " r.%(action)s = q('%(url)s', '%(method)s');\n" % {'action': action, 'url': item['url'], 'method': item['method']} # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['key']]
code += '}'
return code |
def escape( text, newline=False ):
"""Escape special html characters."""
if isinstance( text, basestring ):
if '&' in text:
text = text.replace( '&', '&' )
if '>' in text:
text = text.replace( '>', '>' )
if '<' in text:
text = text.replace( '<', '<' )
if '\"' in text:
text = text.replace( '\"', '"' )
if '\'' in text:
text = text.replace( '\'', '"' )
if newline:
if '\n' in text:
text = text.replace( '\n', '<br>' )
return text | def function[escape, parameter[text, newline]]:
constant[Escape special html characters.]
if call[name[isinstance], parameter[name[text], name[basestring]]] begin[:]
if compare[constant[&] in name[text]] begin[:]
variable[text] assign[=] call[name[text].replace, parameter[constant[&], constant[&]]]
if compare[constant[>] in name[text]] begin[:]
variable[text] assign[=] call[name[text].replace, parameter[constant[>], constant[>]]]
if compare[constant[<] in name[text]] begin[:]
variable[text] assign[=] call[name[text].replace, parameter[constant[<], constant[<]]]
if compare[constant["] in name[text]] begin[:]
variable[text] assign[=] call[name[text].replace, parameter[constant["], constant["]]]
if compare[constant['] in name[text]] begin[:]
variable[text] assign[=] call[name[text].replace, parameter[constant['], constant["]]]
if name[newline] begin[:]
if compare[constant[
] in name[text]] begin[:]
variable[text] assign[=] call[name[text].replace, parameter[constant[
], constant[<br>]]]
return[name[text]] | keyword[def] identifier[escape] ( identifier[text] , identifier[newline] = keyword[False] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[text] , identifier[basestring] ):
keyword[if] literal[string] keyword[in] identifier[text] :
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[text] :
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[text] :
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[text] :
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[text] :
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[newline] :
keyword[if] literal[string] keyword[in] identifier[text] :
identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] )
keyword[return] identifier[text] | def escape(text, newline=False):
"""Escape special html characters."""
if isinstance(text, basestring):
if '&' in text:
text = text.replace('&', '&') # depends on [control=['if'], data=['text']]
if '>' in text:
text = text.replace('>', '>') # depends on [control=['if'], data=['text']]
if '<' in text:
text = text.replace('<', '<') # depends on [control=['if'], data=['text']]
if '"' in text:
text = text.replace('"', '"') # depends on [control=['if'], data=['text']]
if "'" in text:
text = text.replace("'", '"') # depends on [control=['if'], data=['text']]
if newline:
if '\n' in text:
text = text.replace('\n', '<br>') # depends on [control=['if'], data=['text']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return text |
def make_parser_with_config_adder(parser, config):
"""factory function for a smarter parser:
return an utility function that pull default from the config as well.
Pull the default for parser not only from the ``default`` kwarg,
but also if an identical value is find in ``config`` where leading
``--`` or ``--no`` is removed.
If the option is a boolean flag, automatically register an opposite,
exclusive option by prepending or removing the `--no-`. This is useful
to overwrite config in ``.travis.yml``
Mutate the config object and remove know keys in order to detect unused
options afterwoard.
"""
def internal(arg, **kwargs):
invert = {
'store_true':'store_false',
'store_false':'store_true',
}
if arg.startswith('--no-'):
key = arg[5:]
else:
key = arg[2:]
if 'default' in kwargs:
if key in config:
kwargs['default'] = config[key]
del config[key]
action = kwargs.get('action')
if action in invert:
exclusive_grp = parser.add_mutually_exclusive_group()
exclusive_grp.add_argument(arg, **kwargs)
kwargs['action'] = invert[action]
kwargs['help'] = 'Inverse of "%s"' % arg
if arg.startswith('--no-'):
arg = '--%s' % arg[5:]
else:
arg = '--no-%s' % arg[2:]
exclusive_grp.add_argument(arg, **kwargs)
else:
parser.add_argument(arg, **kwargs)
return internal | def function[make_parser_with_config_adder, parameter[parser, config]]:
constant[factory function for a smarter parser:
return an utility function that pull default from the config as well.
Pull the default for parser not only from the ``default`` kwarg,
but also if an identical value is find in ``config`` where leading
``--`` or ``--no`` is removed.
If the option is a boolean flag, automatically register an opposite,
exclusive option by prepending or removing the `--no-`. This is useful
to overwrite config in ``.travis.yml``
Mutate the config object and remove know keys in order to detect unused
options afterwoard.
]
def function[internal, parameter[arg]]:
variable[invert] assign[=] dictionary[[<ast.Constant object at 0x7da1b106d870>, <ast.Constant object at 0x7da1b106d4e0>], [<ast.Constant object at 0x7da1b106c4c0>, <ast.Constant object at 0x7da1b106d930>]]
if call[name[arg].startswith, parameter[constant[--no-]]] begin[:]
variable[key] assign[=] call[name[arg]][<ast.Slice object at 0x7da1b106d2d0>]
if compare[constant[default] in name[kwargs]] begin[:]
if compare[name[key] in name[config]] begin[:]
call[name[kwargs]][constant[default]] assign[=] call[name[config]][name[key]]
<ast.Delete object at 0x7da1b106c2e0>
variable[action] assign[=] call[name[kwargs].get, parameter[constant[action]]]
if compare[name[action] in name[invert]] begin[:]
variable[exclusive_grp] assign[=] call[name[parser].add_mutually_exclusive_group, parameter[]]
call[name[exclusive_grp].add_argument, parameter[name[arg]]]
call[name[kwargs]][constant[action]] assign[=] call[name[invert]][name[action]]
call[name[kwargs]][constant[help]] assign[=] binary_operation[constant[Inverse of "%s"] <ast.Mod object at 0x7da2590d6920> name[arg]]
if call[name[arg].startswith, parameter[constant[--no-]]] begin[:]
variable[arg] assign[=] binary_operation[constant[--%s] <ast.Mod object at 0x7da2590d6920> call[name[arg]][<ast.Slice object at 0x7da1b103b130>]]
call[name[exclusive_grp].add_argument, parameter[name[arg]]]
return[name[internal]] | keyword[def] identifier[make_parser_with_config_adder] ( identifier[parser] , identifier[config] ):
literal[string]
keyword[def] identifier[internal] ( identifier[arg] ,** identifier[kwargs] ):
identifier[invert] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
keyword[if] identifier[arg] . identifier[startswith] ( literal[string] ):
identifier[key] = identifier[arg] [ literal[int] :]
keyword[else] :
identifier[key] = identifier[arg] [ literal[int] :]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
keyword[if] identifier[key] keyword[in] identifier[config] :
identifier[kwargs] [ literal[string] ]= identifier[config] [ identifier[key] ]
keyword[del] identifier[config] [ identifier[key] ]
identifier[action] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] identifier[action] keyword[in] identifier[invert] :
identifier[exclusive_grp] = identifier[parser] . identifier[add_mutually_exclusive_group] ()
identifier[exclusive_grp] . identifier[add_argument] ( identifier[arg] ,** identifier[kwargs] )
identifier[kwargs] [ literal[string] ]= identifier[invert] [ identifier[action] ]
identifier[kwargs] [ literal[string] ]= literal[string] % identifier[arg]
keyword[if] identifier[arg] . identifier[startswith] ( literal[string] ):
identifier[arg] = literal[string] % identifier[arg] [ literal[int] :]
keyword[else] :
identifier[arg] = literal[string] % identifier[arg] [ literal[int] :]
identifier[exclusive_grp] . identifier[add_argument] ( identifier[arg] ,** identifier[kwargs] )
keyword[else] :
identifier[parser] . identifier[add_argument] ( identifier[arg] ,** identifier[kwargs] )
keyword[return] identifier[internal] | def make_parser_with_config_adder(parser, config):
"""factory function for a smarter parser:
return an utility function that pull default from the config as well.
Pull the default for parser not only from the ``default`` kwarg,
but also if an identical value is find in ``config`` where leading
``--`` or ``--no`` is removed.
If the option is a boolean flag, automatically register an opposite,
exclusive option by prepending or removing the `--no-`. This is useful
to overwrite config in ``.travis.yml``
Mutate the config object and remove know keys in order to detect unused
options afterwoard.
"""
def internal(arg, **kwargs):
invert = {'store_true': 'store_false', 'store_false': 'store_true'}
if arg.startswith('--no-'):
key = arg[5:] # depends on [control=['if'], data=[]]
else:
key = arg[2:]
if 'default' in kwargs:
if key in config:
kwargs['default'] = config[key]
del config[key] # depends on [control=['if'], data=['key', 'config']] # depends on [control=['if'], data=['kwargs']]
action = kwargs.get('action')
if action in invert:
exclusive_grp = parser.add_mutually_exclusive_group()
exclusive_grp.add_argument(arg, **kwargs)
kwargs['action'] = invert[action]
kwargs['help'] = 'Inverse of "%s"' % arg
if arg.startswith('--no-'):
arg = '--%s' % arg[5:] # depends on [control=['if'], data=[]]
else:
arg = '--no-%s' % arg[2:]
exclusive_grp.add_argument(arg, **kwargs) # depends on [control=['if'], data=['action', 'invert']]
else:
parser.add_argument(arg, **kwargs)
return internal |
def to_timestamp(self, freq=None, how='start'):
"""
Cast to DatetimeArray/Index.
Parameters
----------
freq : string or DateOffset, optional
Target frequency. The default is 'D' for week or longer,
'S' otherwise
how : {'s', 'e', 'start', 'end'}
Returns
-------
DatetimeArray/Index
"""
from pandas.core.arrays import DatetimeArray
how = libperiod._validate_end_alias(how)
end = how == 'E'
if end:
if freq == 'B':
# roll forward to ensure we land on B date
adjust = Timedelta(1, 'D') - Timedelta(1, 'ns')
return self.to_timestamp(how='start') + adjust
else:
adjust = Timedelta(1, 'ns')
return (self + self.freq).to_timestamp(how='start') - adjust
if freq is None:
base, mult = libfrequencies.get_freq_code(self.freq)
freq = libfrequencies.get_to_timestamp_base(base)
else:
freq = Period._maybe_convert_freq(freq)
base, mult = libfrequencies.get_freq_code(freq)
new_data = self.asfreq(freq, how=how)
new_data = libperiod.periodarr_to_dt64arr(new_data.asi8, base)
return DatetimeArray._from_sequence(new_data, freq='infer') | def function[to_timestamp, parameter[self, freq, how]]:
constant[
Cast to DatetimeArray/Index.
Parameters
----------
freq : string or DateOffset, optional
Target frequency. The default is 'D' for week or longer,
'S' otherwise
how : {'s', 'e', 'start', 'end'}
Returns
-------
DatetimeArray/Index
]
from relative_module[pandas.core.arrays] import module[DatetimeArray]
variable[how] assign[=] call[name[libperiod]._validate_end_alias, parameter[name[how]]]
variable[end] assign[=] compare[name[how] equal[==] constant[E]]
if name[end] begin[:]
if compare[name[freq] equal[==] constant[B]] begin[:]
variable[adjust] assign[=] binary_operation[call[name[Timedelta], parameter[constant[1], constant[D]]] - call[name[Timedelta], parameter[constant[1], constant[ns]]]]
return[binary_operation[call[name[self].to_timestamp, parameter[]] + name[adjust]]]
if compare[name[freq] is constant[None]] begin[:]
<ast.Tuple object at 0x7da20c6c7f70> assign[=] call[name[libfrequencies].get_freq_code, parameter[name[self].freq]]
variable[freq] assign[=] call[name[libfrequencies].get_to_timestamp_base, parameter[name[base]]]
<ast.Tuple object at 0x7da18dc07670> assign[=] call[name[libfrequencies].get_freq_code, parameter[name[freq]]]
variable[new_data] assign[=] call[name[self].asfreq, parameter[name[freq]]]
variable[new_data] assign[=] call[name[libperiod].periodarr_to_dt64arr, parameter[name[new_data].asi8, name[base]]]
return[call[name[DatetimeArray]._from_sequence, parameter[name[new_data]]]] | keyword[def] identifier[to_timestamp] ( identifier[self] , identifier[freq] = keyword[None] , identifier[how] = literal[string] ):
literal[string]
keyword[from] identifier[pandas] . identifier[core] . identifier[arrays] keyword[import] identifier[DatetimeArray]
identifier[how] = identifier[libperiod] . identifier[_validate_end_alias] ( identifier[how] )
identifier[end] = identifier[how] == literal[string]
keyword[if] identifier[end] :
keyword[if] identifier[freq] == literal[string] :
identifier[adjust] = identifier[Timedelta] ( literal[int] , literal[string] )- identifier[Timedelta] ( literal[int] , literal[string] )
keyword[return] identifier[self] . identifier[to_timestamp] ( identifier[how] = literal[string] )+ identifier[adjust]
keyword[else] :
identifier[adjust] = identifier[Timedelta] ( literal[int] , literal[string] )
keyword[return] ( identifier[self] + identifier[self] . identifier[freq] ). identifier[to_timestamp] ( identifier[how] = literal[string] )- identifier[adjust]
keyword[if] identifier[freq] keyword[is] keyword[None] :
identifier[base] , identifier[mult] = identifier[libfrequencies] . identifier[get_freq_code] ( identifier[self] . identifier[freq] )
identifier[freq] = identifier[libfrequencies] . identifier[get_to_timestamp_base] ( identifier[base] )
keyword[else] :
identifier[freq] = identifier[Period] . identifier[_maybe_convert_freq] ( identifier[freq] )
identifier[base] , identifier[mult] = identifier[libfrequencies] . identifier[get_freq_code] ( identifier[freq] )
identifier[new_data] = identifier[self] . identifier[asfreq] ( identifier[freq] , identifier[how] = identifier[how] )
identifier[new_data] = identifier[libperiod] . identifier[periodarr_to_dt64arr] ( identifier[new_data] . identifier[asi8] , identifier[base] )
keyword[return] identifier[DatetimeArray] . identifier[_from_sequence] ( identifier[new_data] , identifier[freq] = literal[string] ) | def to_timestamp(self, freq=None, how='start'):
"""
Cast to DatetimeArray/Index.
Parameters
----------
freq : string or DateOffset, optional
Target frequency. The default is 'D' for week or longer,
'S' otherwise
how : {'s', 'e', 'start', 'end'}
Returns
-------
DatetimeArray/Index
"""
from pandas.core.arrays import DatetimeArray
how = libperiod._validate_end_alias(how)
end = how == 'E'
if end:
if freq == 'B':
# roll forward to ensure we land on B date
adjust = Timedelta(1, 'D') - Timedelta(1, 'ns')
return self.to_timestamp(how='start') + adjust # depends on [control=['if'], data=[]]
else:
adjust = Timedelta(1, 'ns')
return (self + self.freq).to_timestamp(how='start') - adjust # depends on [control=['if'], data=[]]
if freq is None:
(base, mult) = libfrequencies.get_freq_code(self.freq)
freq = libfrequencies.get_to_timestamp_base(base) # depends on [control=['if'], data=['freq']]
else:
freq = Period._maybe_convert_freq(freq)
(base, mult) = libfrequencies.get_freq_code(freq)
new_data = self.asfreq(freq, how=how)
new_data = libperiod.periodarr_to_dt64arr(new_data.asi8, base)
return DatetimeArray._from_sequence(new_data, freq='infer') |
def read_others(self):
"""Reads the answers, authorities and additionals section
of the packet"""
format = '!HHiH'
length = struct.calcsize(format)
n = self.num_answers + self.num_authorities + self.num_additionals
for i in range(0, n):
domain = self.read_name()
info = struct.unpack(format,
self.data[self.offset:self.offset + length])
self.offset += length
rec = None
if info[0] == _TYPE_A:
rec = DNSAddress(domain,
info[0], info[1], info[2],
self.read_string(4))
elif info[0] == _TYPE_CNAME or info[0] == _TYPE_PTR:
rec = DNSPointer(domain,
info[0], info[1], info[2],
self.read_name())
elif info[0] == _TYPE_TXT:
rec = DNSText(domain,
info[0], info[1], info[2],
self.read_string(info[3]))
elif info[0] == _TYPE_SRV:
rec = DNSService(domain,
info[0], info[1], info[2],
self.read_unsigned_short(),
self.read_unsigned_short(),
self.read_unsigned_short(),
self.read_name())
elif info[0] == _TYPE_HINFO:
rec = DNSHinfo(domain,
info[0], info[1], info[2],
self.read_character_string(),
self.read_character_string())
elif info[0] == _TYPE_RRSIG:
rec = DNSSignatureI(domain,
info[0], info[1], info[2],
self.read_string(18),
self.read_name(),
self.read_character_string())
elif info[0] == _TYPE_AAAA:
rec = DNSAddress(domain,
info[0], info[1], info[2],
self.read_string(16))
else:
# Try to ignore types we don't know about
# this may mean the rest of the name is
# unable to be parsed, and may show errors
# so this is left for debugging. New types
# encountered need to be parsed properly.
#
#print "UNKNOWN TYPE = " + str(info[0])
#raise BadTypeInNameException
pass
if rec is not None:
self.answers.append(rec) | def function[read_others, parameter[self]]:
constant[Reads the answers, authorities and additionals section
of the packet]
variable[format] assign[=] constant[!HHiH]
variable[length] assign[=] call[name[struct].calcsize, parameter[name[format]]]
variable[n] assign[=] binary_operation[binary_operation[name[self].num_answers + name[self].num_authorities] + name[self].num_additionals]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[n]]]] begin[:]
variable[domain] assign[=] call[name[self].read_name, parameter[]]
variable[info] assign[=] call[name[struct].unpack, parameter[name[format], call[name[self].data][<ast.Slice object at 0x7da1b0fb8040>]]]
<ast.AugAssign object at 0x7da1b0fb84c0>
variable[rec] assign[=] constant[None]
if compare[call[name[info]][constant[0]] equal[==] name[_TYPE_A]] begin[:]
variable[rec] assign[=] call[name[DNSAddress], parameter[name[domain], call[name[info]][constant[0]], call[name[info]][constant[1]], call[name[info]][constant[2]], call[name[self].read_string, parameter[constant[4]]]]]
if compare[name[rec] is_not constant[None]] begin[:]
call[name[self].answers.append, parameter[name[rec]]] | keyword[def] identifier[read_others] ( identifier[self] ):
literal[string]
identifier[format] = literal[string]
identifier[length] = identifier[struct] . identifier[calcsize] ( identifier[format] )
identifier[n] = identifier[self] . identifier[num_answers] + identifier[self] . identifier[num_authorities] + identifier[self] . identifier[num_additionals]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[n] ):
identifier[domain] = identifier[self] . identifier[read_name] ()
identifier[info] = identifier[struct] . identifier[unpack] ( identifier[format] ,
identifier[self] . identifier[data] [ identifier[self] . identifier[offset] : identifier[self] . identifier[offset] + identifier[length] ])
identifier[self] . identifier[offset] += identifier[length]
identifier[rec] = keyword[None]
keyword[if] identifier[info] [ literal[int] ]== identifier[_TYPE_A] :
identifier[rec] = identifier[DNSAddress] ( identifier[domain] ,
identifier[info] [ literal[int] ], identifier[info] [ literal[int] ], identifier[info] [ literal[int] ],
identifier[self] . identifier[read_string] ( literal[int] ))
keyword[elif] identifier[info] [ literal[int] ]== identifier[_TYPE_CNAME] keyword[or] identifier[info] [ literal[int] ]== identifier[_TYPE_PTR] :
identifier[rec] = identifier[DNSPointer] ( identifier[domain] ,
identifier[info] [ literal[int] ], identifier[info] [ literal[int] ], identifier[info] [ literal[int] ],
identifier[self] . identifier[read_name] ())
keyword[elif] identifier[info] [ literal[int] ]== identifier[_TYPE_TXT] :
identifier[rec] = identifier[DNSText] ( identifier[domain] ,
identifier[info] [ literal[int] ], identifier[info] [ literal[int] ], identifier[info] [ literal[int] ],
identifier[self] . identifier[read_string] ( identifier[info] [ literal[int] ]))
keyword[elif] identifier[info] [ literal[int] ]== identifier[_TYPE_SRV] :
identifier[rec] = identifier[DNSService] ( identifier[domain] ,
identifier[info] [ literal[int] ], identifier[info] [ literal[int] ], identifier[info] [ literal[int] ],
identifier[self] . identifier[read_unsigned_short] (),
identifier[self] . identifier[read_unsigned_short] (),
identifier[self] . identifier[read_unsigned_short] (),
identifier[self] . identifier[read_name] ())
keyword[elif] identifier[info] [ literal[int] ]== identifier[_TYPE_HINFO] :
identifier[rec] = identifier[DNSHinfo] ( identifier[domain] ,
identifier[info] [ literal[int] ], identifier[info] [ literal[int] ], identifier[info] [ literal[int] ],
identifier[self] . identifier[read_character_string] (),
identifier[self] . identifier[read_character_string] ())
keyword[elif] identifier[info] [ literal[int] ]== identifier[_TYPE_RRSIG] :
identifier[rec] = identifier[DNSSignatureI] ( identifier[domain] ,
identifier[info] [ literal[int] ], identifier[info] [ literal[int] ], identifier[info] [ literal[int] ],
identifier[self] . identifier[read_string] ( literal[int] ),
identifier[self] . identifier[read_name] (),
identifier[self] . identifier[read_character_string] ())
keyword[elif] identifier[info] [ literal[int] ]== identifier[_TYPE_AAAA] :
identifier[rec] = identifier[DNSAddress] ( identifier[domain] ,
identifier[info] [ literal[int] ], identifier[info] [ literal[int] ], identifier[info] [ literal[int] ],
identifier[self] . identifier[read_string] ( literal[int] ))
keyword[else] :
keyword[pass]
keyword[if] identifier[rec] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[answers] . identifier[append] ( identifier[rec] ) | def read_others(self):
"""Reads the answers, authorities and additionals section
of the packet"""
format = '!HHiH'
length = struct.calcsize(format)
n = self.num_answers + self.num_authorities + self.num_additionals
for i in range(0, n):
domain = self.read_name()
info = struct.unpack(format, self.data[self.offset:self.offset + length])
self.offset += length
rec = None
if info[0] == _TYPE_A:
rec = DNSAddress(domain, info[0], info[1], info[2], self.read_string(4)) # depends on [control=['if'], data=[]]
elif info[0] == _TYPE_CNAME or info[0] == _TYPE_PTR:
rec = DNSPointer(domain, info[0], info[1], info[2], self.read_name()) # depends on [control=['if'], data=[]]
elif info[0] == _TYPE_TXT:
rec = DNSText(domain, info[0], info[1], info[2], self.read_string(info[3])) # depends on [control=['if'], data=[]]
elif info[0] == _TYPE_SRV:
rec = DNSService(domain, info[0], info[1], info[2], self.read_unsigned_short(), self.read_unsigned_short(), self.read_unsigned_short(), self.read_name()) # depends on [control=['if'], data=[]]
elif info[0] == _TYPE_HINFO:
rec = DNSHinfo(domain, info[0], info[1], info[2], self.read_character_string(), self.read_character_string()) # depends on [control=['if'], data=[]]
elif info[0] == _TYPE_RRSIG:
rec = DNSSignatureI(domain, info[0], info[1], info[2], self.read_string(18), self.read_name(), self.read_character_string()) # depends on [control=['if'], data=[]]
elif info[0] == _TYPE_AAAA:
rec = DNSAddress(domain, info[0], info[1], info[2], self.read_string(16)) # depends on [control=['if'], data=[]]
else:
# Try to ignore types we don't know about
# this may mean the rest of the name is
# unable to be parsed, and may show errors
# so this is left for debugging. New types
# encountered need to be parsed properly.
#
#print "UNKNOWN TYPE = " + str(info[0])
#raise BadTypeInNameException
pass
if rec is not None:
self.answers.append(rec) # depends on [control=['if'], data=['rec']] # depends on [control=['for'], data=[]] |
def generate_hub_key(resolver_id, hub_id, repository_id, entity_type, entity_id=None):
"""Create and return an array of hub keys
:param resolver_id: the service that can resolve this key
:param hub_id: the unique id of the hub
:param repository_id: the type of id that the provider recognises
:param entity_type: the type of the entity to which the key refers.
:param entity_id: ID of entity (UUID)
:returns: a hub key
:raises:
:AttributeError: if a parameter has a bad value
:TypeError: if a parameter has a bad value
:ValueError: if a parameter has a bad value
"""
parsed = urlparse(resolver_id)
if not parsed.scheme:
parsed = parsed._replace(scheme=PROTOCOL, netloc=idna_encode(parsed.path.lower()), path=u'')
else:
parsed = parsed._replace(netloc=idna_encode(parsed.netloc.lower()))
resolver_id = urlunparse(parsed)
hub_id = url_quote(hub_id.lower())
if not entity_id:
entity_id = str(uuid.uuid4()).replace('-', '')
else:
match_part(entity_id, 'entity_id')
# If any of these checks fail a ValueError exception is raised
match_part(resolver_id, 'resolver_id')
match_part(hub_id, 'hub_id')
match_part(repository_id, 'repository_id')
match_part(entity_type, 'entity_type')
hub_key = SEPARATOR.join(
[resolver_id, SCHEMA, hub_id, repository_id, entity_type, entity_id])
return hub_key | def function[generate_hub_key, parameter[resolver_id, hub_id, repository_id, entity_type, entity_id]]:
constant[Create and return an array of hub keys
:param resolver_id: the service that can resolve this key
:param hub_id: the unique id of the hub
:param repository_id: the type of id that the provider recognises
:param entity_type: the type of the entity to which the key refers.
:param entity_id: ID of entity (UUID)
:returns: a hub key
:raises:
:AttributeError: if a parameter has a bad value
:TypeError: if a parameter has a bad value
:ValueError: if a parameter has a bad value
]
variable[parsed] assign[=] call[name[urlparse], parameter[name[resolver_id]]]
if <ast.UnaryOp object at 0x7da1affee230> begin[:]
variable[parsed] assign[=] call[name[parsed]._replace, parameter[]]
variable[resolver_id] assign[=] call[name[urlunparse], parameter[name[parsed]]]
variable[hub_id] assign[=] call[name[url_quote], parameter[call[name[hub_id].lower, parameter[]]]]
if <ast.UnaryOp object at 0x7da1affee170> begin[:]
variable[entity_id] assign[=] call[call[name[str], parameter[call[name[uuid].uuid4, parameter[]]]].replace, parameter[constant[-], constant[]]]
call[name[match_part], parameter[name[resolver_id], constant[resolver_id]]]
call[name[match_part], parameter[name[hub_id], constant[hub_id]]]
call[name[match_part], parameter[name[repository_id], constant[repository_id]]]
call[name[match_part], parameter[name[entity_type], constant[entity_type]]]
variable[hub_key] assign[=] call[name[SEPARATOR].join, parameter[list[[<ast.Name object at 0x7da20c6c7d30>, <ast.Name object at 0x7da20c6c7700>, <ast.Name object at 0x7da20c6c4a00>, <ast.Name object at 0x7da20c6c6830>, <ast.Name object at 0x7da20c6c6410>, <ast.Name object at 0x7da20c6c5360>]]]]
return[name[hub_key]] | keyword[def] identifier[generate_hub_key] ( identifier[resolver_id] , identifier[hub_id] , identifier[repository_id] , identifier[entity_type] , identifier[entity_id] = keyword[None] ):
literal[string]
identifier[parsed] = identifier[urlparse] ( identifier[resolver_id] )
keyword[if] keyword[not] identifier[parsed] . identifier[scheme] :
identifier[parsed] = identifier[parsed] . identifier[_replace] ( identifier[scheme] = identifier[PROTOCOL] , identifier[netloc] = identifier[idna_encode] ( identifier[parsed] . identifier[path] . identifier[lower] ()), identifier[path] = literal[string] )
keyword[else] :
identifier[parsed] = identifier[parsed] . identifier[_replace] ( identifier[netloc] = identifier[idna_encode] ( identifier[parsed] . identifier[netloc] . identifier[lower] ()))
identifier[resolver_id] = identifier[urlunparse] ( identifier[parsed] )
identifier[hub_id] = identifier[url_quote] ( identifier[hub_id] . identifier[lower] ())
keyword[if] keyword[not] identifier[entity_id] :
identifier[entity_id] = identifier[str] ( identifier[uuid] . identifier[uuid4] ()). identifier[replace] ( literal[string] , literal[string] )
keyword[else] :
identifier[match_part] ( identifier[entity_id] , literal[string] )
identifier[match_part] ( identifier[resolver_id] , literal[string] )
identifier[match_part] ( identifier[hub_id] , literal[string] )
identifier[match_part] ( identifier[repository_id] , literal[string] )
identifier[match_part] ( identifier[entity_type] , literal[string] )
identifier[hub_key] = identifier[SEPARATOR] . identifier[join] (
[ identifier[resolver_id] , identifier[SCHEMA] , identifier[hub_id] , identifier[repository_id] , identifier[entity_type] , identifier[entity_id] ])
keyword[return] identifier[hub_key] | def generate_hub_key(resolver_id, hub_id, repository_id, entity_type, entity_id=None):
"""Create and return an array of hub keys
:param resolver_id: the service that can resolve this key
:param hub_id: the unique id of the hub
:param repository_id: the type of id that the provider recognises
:param entity_type: the type of the entity to which the key refers.
:param entity_id: ID of entity (UUID)
:returns: a hub key
:raises:
:AttributeError: if a parameter has a bad value
:TypeError: if a parameter has a bad value
:ValueError: if a parameter has a bad value
"""
parsed = urlparse(resolver_id)
if not parsed.scheme:
parsed = parsed._replace(scheme=PROTOCOL, netloc=idna_encode(parsed.path.lower()), path=u'') # depends on [control=['if'], data=[]]
else:
parsed = parsed._replace(netloc=idna_encode(parsed.netloc.lower()))
resolver_id = urlunparse(parsed)
hub_id = url_quote(hub_id.lower())
if not entity_id:
entity_id = str(uuid.uuid4()).replace('-', '') # depends on [control=['if'], data=[]]
else:
match_part(entity_id, 'entity_id')
# If any of these checks fail a ValueError exception is raised
match_part(resolver_id, 'resolver_id')
match_part(hub_id, 'hub_id')
match_part(repository_id, 'repository_id')
match_part(entity_type, 'entity_type')
hub_key = SEPARATOR.join([resolver_id, SCHEMA, hub_id, repository_id, entity_type, entity_id])
return hub_key |
def set_logfile(path, instance):
"""Specify logfile path"""
global logfile
logfile = os.path.normpath(path) + '/hfos.' + instance + '.log' | def function[set_logfile, parameter[path, instance]]:
constant[Specify logfile path]
<ast.Global object at 0x7da1b0eda860>
variable[logfile] assign[=] binary_operation[binary_operation[binary_operation[call[name[os].path.normpath, parameter[name[path]]] + constant[/hfos.]] + name[instance]] + constant[.log]] | keyword[def] identifier[set_logfile] ( identifier[path] , identifier[instance] ):
literal[string]
keyword[global] identifier[logfile]
identifier[logfile] = identifier[os] . identifier[path] . identifier[normpath] ( identifier[path] )+ literal[string] + identifier[instance] + literal[string] | def set_logfile(path, instance):
"""Specify logfile path"""
global logfile
logfile = os.path.normpath(path) + '/hfos.' + instance + '.log' |
def enableEditing(self, enabled):
"""Enable the editing buttons to add/remove rows/columns and to edit the data.
This method is also a slot.
In addition, the data of model will be made editable,
if the `enabled` parameter is true.
Args:
enabled (bool): This flag indicates, if the buttons
shall be activated.
"""
for button in self.buttons[1:]:
button.setEnabled(enabled)
if button.isChecked():
button.setChecked(False)
model = self.tableView.model()
if model is not None:
model.enableEditing(enabled) | def function[enableEditing, parameter[self, enabled]]:
constant[Enable the editing buttons to add/remove rows/columns and to edit the data.
This method is also a slot.
In addition, the data of model will be made editable,
if the `enabled` parameter is true.
Args:
enabled (bool): This flag indicates, if the buttons
shall be activated.
]
for taget[name[button]] in starred[call[name[self].buttons][<ast.Slice object at 0x7da20c6e5c60>]] begin[:]
call[name[button].setEnabled, parameter[name[enabled]]]
if call[name[button].isChecked, parameter[]] begin[:]
call[name[button].setChecked, parameter[constant[False]]]
variable[model] assign[=] call[name[self].tableView.model, parameter[]]
if compare[name[model] is_not constant[None]] begin[:]
call[name[model].enableEditing, parameter[name[enabled]]] | keyword[def] identifier[enableEditing] ( identifier[self] , identifier[enabled] ):
literal[string]
keyword[for] identifier[button] keyword[in] identifier[self] . identifier[buttons] [ literal[int] :]:
identifier[button] . identifier[setEnabled] ( identifier[enabled] )
keyword[if] identifier[button] . identifier[isChecked] ():
identifier[button] . identifier[setChecked] ( keyword[False] )
identifier[model] = identifier[self] . identifier[tableView] . identifier[model] ()
keyword[if] identifier[model] keyword[is] keyword[not] keyword[None] :
identifier[model] . identifier[enableEditing] ( identifier[enabled] ) | def enableEditing(self, enabled):
"""Enable the editing buttons to add/remove rows/columns and to edit the data.
This method is also a slot.
In addition, the data of model will be made editable,
if the `enabled` parameter is true.
Args:
enabled (bool): This flag indicates, if the buttons
shall be activated.
"""
for button in self.buttons[1:]:
button.setEnabled(enabled)
if button.isChecked():
button.setChecked(False) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['button']]
model = self.tableView.model()
if model is not None:
model.enableEditing(enabled) # depends on [control=['if'], data=['model']] |
def update(self):
    """
    Refresh the cached API result.

    Calls the GitHub API endpoint described by `_apicall_parameters()`.
    On success, the response is run through `_apiresult_postprocess()` and
    cached with no expiry. On failure, an empty placeholder from
    `_apiresult_error()` is cached and a retry is scheduled after
    ``BACKOFF`` seconds.

    Call this directly to invalidate the current cache; otherwise `data()`
    calls it automatically when needed.

    :return: ``True`` if the API call succeeded, ``False`` otherwise.
    """
    result = self.api.github_api(*self._apicall_parameters())
    succeeded = result is not None
    if succeeded:
        # Fresh data: cache never expires until explicitly invalidated.
        self._next_update = None
        self._cached_result = self._apiresult_postprocess(result)
    else:
        # Error: retry after BACKOFF and pretend the result is empty
        # until the error clears.
        self._next_update = datetime.now() + timedelta(seconds=self.BACKOFF)
        self._cached_result = self._apiresult_error()
    # `del self._data` forces the lazy cache to pick up _cached_result,
    # but doing it before the first lookup would create ugly database
    # entries in the cache table.
    if self._first_lookup:
        self._first_lookup = False
    else:
        del self._data
    return succeeded
constant[
Connect to GitHub API endpoint specified by `_apicall_parameters()`,
postprocess the result using `_apiresult_postprocess()` and trigger
a cache update if the API call was successful.
If an error occurs, cache the empty result generated by
`_apiresult_error()`. Additionally, set up retrying after a certain
time.
Return `True` if the API call was successful, `False` otherwise.
Call this method directly if you want to invalidate the current cache.
Otherwise, just call `data()`, which will automatically call `update()`
if required.
]
variable[result] assign[=] call[name[self].api.github_api, parameter[<ast.Starred object at 0x7da1b27ec9a0>]]
if compare[name[result] is constant[None]] begin[:]
name[self]._next_update assign[=] binary_operation[call[name[datetime].now, parameter[]] + call[name[timedelta], parameter[]]]
name[self]._cached_result assign[=] call[name[self]._apiresult_error, parameter[]]
if <ast.UnaryOp object at 0x7da1b27ed420> begin[:]
<ast.Delete object at 0x7da1b27ece80>
return[compare[name[result] is_not constant[None]]] | keyword[def] identifier[update] ( identifier[self] ):
literal[string]
identifier[result] = identifier[self] . identifier[api] . identifier[github_api] (* identifier[self] . identifier[_apicall_parameters] ())
keyword[if] identifier[result] keyword[is] keyword[None] :
identifier[self] . identifier[_next_update] = identifier[datetime] . identifier[now] ()+ identifier[timedelta] ( identifier[seconds] = identifier[self] . identifier[BACKOFF] )
identifier[self] . identifier[_cached_result] = identifier[self] . identifier[_apiresult_error] ()
keyword[else] :
identifier[self] . identifier[_next_update] = keyword[None]
identifier[self] . identifier[_cached_result] = identifier[self] . identifier[_apiresult_postprocess] ( identifier[result] )
keyword[if] keyword[not] identifier[self] . identifier[_first_lookup] :
keyword[del] identifier[self] . identifier[_data]
keyword[else] :
identifier[self] . identifier[_first_lookup] = keyword[False]
keyword[return] identifier[result] keyword[is] keyword[not] keyword[None] | def update(self):
"""
Connect to GitHub API endpoint specified by `_apicall_parameters()`,
postprocess the result using `_apiresult_postprocess()` and trigger
a cache update if the API call was successful.
If an error occurs, cache the empty result generated by
`_apiresult_error()`. Additionally, set up retrying after a certain
time.
Return `True` if the API call was successful, `False` otherwise.
Call this method directly if you want to invalidate the current cache.
Otherwise, just call `data()`, which will automatically call `update()`
if required.
"""
result = self.api.github_api(*self._apicall_parameters())
if result is None:
# an error occurred, try again after BACKOFF
self._next_update = datetime.now() + timedelta(seconds=self.BACKOFF)
# assume an empty result until the error disappears
self._cached_result = self._apiresult_error() # depends on [control=['if'], data=[]]
else:
# request successful, cache does not expire
self._next_update = None
# Write the new result into self._cached_result to be picked up by
# _data on `del self._data`.
self._cached_result = self._apiresult_postprocess(result)
# Don't `del self._data` if it has never been cached, that would create
# ugly database entries in the cache table.
if not self._first_lookup:
del self._data # depends on [control=['if'], data=[]]
else:
self._first_lookup = False
# signal success or error
return result is not None |
def get(cls, controller):
    '''Find a mapping that can apply to the given controller. Returns None if unsuccessful.

    :param controller: :class:`Controller` to look up
    :return: :class:`ControllerMapping`
    '''
    # Registry is keyed by (vendor_id, product_id); missing keys mean
    # no mapping is known for this controller.
    key = (controller.vendor_id, controller.product_id)
    return cls._registry.get(key)
constant[Find a mapping that can apply to the given controller. Returns None if unsuccessful.
:param controller: :class:`Controller` to look up
:return: :class:`ControllerMapping`
]
<ast.Try object at 0x7da2054a63b0> | keyword[def] identifier[get] ( identifier[cls] , identifier[controller] ):
literal[string]
keyword[try] :
keyword[return] identifier[cls] . identifier[_registry] [( identifier[controller] . identifier[vendor_id] , identifier[controller] . identifier[product_id] )]
keyword[except] identifier[KeyError] :
keyword[return] keyword[None] | def get(cls, controller):
"""Find a mapping that can apply to the given controller. Returns None if unsuccessful.
:param controller: :class:`Controller` to look up
:return: :class:`ControllerMapping`
"""
try:
return cls._registry[controller.vendor_id, controller.product_id] # depends on [control=['try'], data=[]]
except KeyError:
return None # depends on [control=['except'], data=[]] |
def join_cwd(self, path=None):
    """
    Join ``path`` onto the effective working directory.

    The instance-level ``working_dir`` takes precedence when set; the
    process working directory is the fallback. With no ``path``, the
    chosen directory itself is returned.
    """
    cwd = self.working_dir
    if cwd:
        logger.debug(
            "'%s' instance 'working_dir' set to '%s' for join_cwd",
            type(self).__name__, cwd,
        )
    else:
        cwd = getcwd()
        logger.debug(
            "'%s' instance 'working_dir' unset; "
            "default to process '%s' for join_cwd",
            type(self).__name__, cwd,
        )
    return join(cwd, path) if path else cwd
constant[
Join the path with the current working directory. If it is
specified for this instance of the object it will be used,
otherwise rely on the global value.
]
if name[self].working_dir begin[:]
call[name[logger].debug, parameter[constant['%s' instance 'working_dir' set to '%s' for join_cwd], call[name[type], parameter[name[self]]].__name__, name[self].working_dir]]
variable[cwd] assign[=] name[self].working_dir
if name[path] begin[:]
return[call[name[join], parameter[name[cwd], name[path]]]]
return[name[cwd]] | keyword[def] identifier[join_cwd] ( identifier[self] , identifier[path] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[working_dir] :
identifier[logger] . identifier[debug] (
literal[string] ,
identifier[type] ( identifier[self] ). identifier[__name__] , identifier[self] . identifier[working_dir] ,
)
identifier[cwd] = identifier[self] . identifier[working_dir]
keyword[else] :
identifier[cwd] = identifier[getcwd] ()
identifier[logger] . identifier[debug] (
literal[string]
literal[string] ,
identifier[type] ( identifier[self] ). identifier[__name__] , identifier[cwd] ,
)
keyword[if] identifier[path] :
keyword[return] identifier[join] ( identifier[cwd] , identifier[path] )
keyword[return] identifier[cwd] | def join_cwd(self, path=None):
"""
Join the path with the current working directory. If it is
specified for this instance of the object it will be used,
otherwise rely on the global value.
"""
if self.working_dir:
logger.debug("'%s' instance 'working_dir' set to '%s' for join_cwd", type(self).__name__, self.working_dir)
cwd = self.working_dir # depends on [control=['if'], data=[]]
else:
cwd = getcwd()
logger.debug("'%s' instance 'working_dir' unset; default to process '%s' for join_cwd", type(self).__name__, cwd)
if path:
return join(cwd, path) # depends on [control=['if'], data=[]]
return cwd |
def determine_band_channel(kal_out):
    """Return (band, channel, target frequency) parsed from kal output.

    Scans ``kal_out`` for a line of the form
    ``Using <band> channel <n> (<freq>)`` and extracts the three fields;
    if several such lines exist, the last one wins.

    :param kal_out: raw stdout text captured from the ``kal`` tool
    :return: tuple ``(band, channel, tgt_freq)``; ``(None, "", "")`` when
        no matching line is found.
    """
    band = None
    channel = ""
    tgt_freq = ""
    for line in kal_out.splitlines():
        if "Using " in line and " channel " in line:
            # Split once instead of re-splitting the line for each field.
            fields = line.split()
            band = str(fields[1])
            channel = str(fields[3])
            tgt_freq = str(fields[4]).replace("(", "").replace(")", "")
    # NOTE: the original wrapped this scan in a `while band == ""` loop
    # that could only ever execute once; the dead loop has been removed.
    return (band, channel, tgt_freq)
constant[Return band, channel, target frequency from kal output.]
variable[band] assign[=] constant[]
variable[channel] assign[=] constant[]
variable[tgt_freq] assign[=] constant[]
while compare[name[band] equal[==] constant[]] begin[:]
for taget[name[line]] in starred[call[name[kal_out].splitlines, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da18bcca020> begin[:]
variable[band] assign[=] call[name[str], parameter[call[call[name[line].split, parameter[]]][constant[1]]]]
variable[channel] assign[=] call[name[str], parameter[call[call[name[line].split, parameter[]]][constant[3]]]]
variable[tgt_freq] assign[=] call[call[call[name[str], parameter[call[call[name[line].split, parameter[]]][constant[4]]]].replace, parameter[constant[(], constant[]]].replace, parameter[constant[)], constant[]]]
if compare[name[band] equal[==] constant[]] begin[:]
variable[band] assign[=] constant[None]
return[tuple[[<ast.Name object at 0x7da18bcc9f60>, <ast.Name object at 0x7da18bcc99c0>, <ast.Name object at 0x7da18bccb0a0>]]] | keyword[def] identifier[determine_band_channel] ( identifier[kal_out] ):
literal[string]
identifier[band] = literal[string]
identifier[channel] = literal[string]
identifier[tgt_freq] = literal[string]
keyword[while] identifier[band] == literal[string] :
keyword[for] identifier[line] keyword[in] identifier[kal_out] . identifier[splitlines] ():
keyword[if] literal[string] keyword[in] identifier[line] keyword[and] literal[string] keyword[in] identifier[line] :
identifier[band] = identifier[str] ( identifier[line] . identifier[split] ()[ literal[int] ])
identifier[channel] = identifier[str] ( identifier[line] . identifier[split] ()[ literal[int] ])
identifier[tgt_freq] = identifier[str] ( identifier[line] . identifier[split] ()[ literal[int] ]). identifier[replace] (
literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[band] == literal[string] :
identifier[band] = keyword[None]
keyword[return] ( identifier[band] , identifier[channel] , identifier[tgt_freq] ) | def determine_band_channel(kal_out):
"""Return band, channel, target frequency from kal output."""
band = ''
channel = ''
tgt_freq = ''
while band == '':
for line in kal_out.splitlines():
if 'Using ' in line and ' channel ' in line:
band = str(line.split()[1])
channel = str(line.split()[3])
tgt_freq = str(line.split()[4]).replace('(', '').replace(')', '') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
if band == '':
band = None # depends on [control=['if'], data=['band']] # depends on [control=['while'], data=['band']]
return (band, channel, tgt_freq) |
def merge_configs(config: Dict[str, Any], default_config: Dict[str, Any]) -> Dict[str, Any]:
    """
    Merges a `default` config with DAG config. Used to set default values
    for a group of DAGs.

    Keys missing from `config` are filled in from `default_config`; keys the
    user already set are left untouched. Nested dicts are merged
    recursively. `config` is modified in place and also returned.

    :param config: config to merge in default values
    :type config: Dict[str, Any]
    :param default_config: config to merge default values from
    :type default_config: Dict[str, Any]
    :returns: dict with merged configs
    :type: Dict[str, Any]
    """
    for key in default_config:
        if key in config:
            # Recurse only when both sides are dicts; a user-provided
            # scalar always wins over the default.
            if isinstance(config[key], dict) and isinstance(default_config[key], dict):
                merge_configs(config[key], default_config[key])
        else:
            # BUGFIX: this `else` was previously attached to the inner
            # isinstance check, which overwrote user-set scalars with
            # defaults and never filled in missing keys at all.
            config[key] = default_config[key]
    return config
constant[
Merges a `default` config with DAG config. Used to set default values
for a group of DAGs.
:param config: config to merge in default values
:type config: Dict[str, Any]
:param default_config: config to merge default values from
:type default_config: Dict[str, Any]
:returns: dict with merged configs
:type: Dict[str, Any]
]
for taget[name[key]] in starred[name[default_config]] begin[:]
if compare[name[key] in name[config]] begin[:]
if <ast.BoolOp object at 0x7da2044c3eb0> begin[:]
call[name[merge_configs], parameter[call[name[config]][name[key]], call[name[default_config]][name[key]]]]
return[name[config]] | keyword[def] identifier[merge_configs] ( identifier[config] : identifier[Dict] [ identifier[str] , identifier[Any] ], identifier[default_config] : identifier[Dict] [ identifier[str] , identifier[Any] ])-> identifier[Dict] [ identifier[str] , identifier[Any] ]:
literal[string]
keyword[for] identifier[key] keyword[in] identifier[default_config] :
keyword[if] identifier[key] keyword[in] identifier[config] :
keyword[if] identifier[isinstance] ( identifier[config] [ identifier[key] ], identifier[dict] ) keyword[and] identifier[isinstance] ( identifier[default_config] [ identifier[key] ], identifier[dict] ):
identifier[merge_configs] ( identifier[config] [ identifier[key] ], identifier[default_config] [ identifier[key] ])
keyword[else] :
identifier[config] [ identifier[key] ]: identifier[Any] = identifier[default_config] [ identifier[key] ]
keyword[return] identifier[config] | def merge_configs(config: Dict[str, Any], default_config: Dict[str, Any]) -> Dict[str, Any]:
"""
Merges a `default` config with DAG config. Used to set default values
for a group of DAGs.
:param config: config to merge in default values
:type config: Dict[str, Any]
:param default_config: config to merge default values from
:type default_config: Dict[str, Any]
:returns: dict with merged configs
:type: Dict[str, Any]
"""
for key in default_config:
if key in config:
if isinstance(config[key], dict) and isinstance(default_config[key], dict):
merge_configs(config[key], default_config[key]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['key', 'config']]
else:
config[key]: Any = default_config[key] # depends on [control=['for'], data=['key']]
return config |
def db_for_write(self, model, **hints):
    """
    Route write operations: models from the `duashttp` app go to the
    unity asset server database; everything else falls through (None).

    Raises ImproperlyConfigured when writes to the asset server database
    have not been explicitly enabled.
    """
    if model._meta.app_label != 'duashttp':
        return None
    if not DUAS_ENABLE_DB_WRITE:
        raise ImproperlyConfigured(
            "Set `DUAS_ENABLE_DB_WRITE` to True in your settings to enable "
            "write operations on unity asset server database"
        )
    return DUAS_DB_ROUTE_PREFIX
constant[
Attempts to write auth models go to duashttp.
]
if compare[name[model]._meta.app_label equal[==] constant[duashttp]] begin[:]
if <ast.UnaryOp object at 0x7da1b142acb0> begin[:]
<ast.Raise object at 0x7da1b142bdf0>
return[name[DUAS_DB_ROUTE_PREFIX]]
return[constant[None]] | keyword[def] identifier[db_for_write] ( identifier[self] , identifier[model] ,** identifier[hints] ):
literal[string]
keyword[if] identifier[model] . identifier[_meta] . identifier[app_label] == literal[string] :
keyword[if] keyword[not] identifier[DUAS_ENABLE_DB_WRITE] :
keyword[raise] identifier[ImproperlyConfigured] (
literal[string]
literal[string]
)
keyword[return] identifier[DUAS_DB_ROUTE_PREFIX]
keyword[return] keyword[None] | def db_for_write(self, model, **hints):
"""
Attempts to write auth models go to duashttp.
"""
if model._meta.app_label == 'duashttp':
if not DUAS_ENABLE_DB_WRITE:
raise ImproperlyConfigured('Set `DUAS_ENABLE_DB_WRITE` to True in your settings to enable write operations on unity asset server database') # depends on [control=['if'], data=[]]
return DUAS_DB_ROUTE_PREFIX # depends on [control=['if'], data=[]]
return None |
def moving_haplotype_diversity(h, size, start=0, stop=None, step=None):
    """Estimate haplotype diversity in moving windows.

    Parameters
    ----------
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.
    size : int
        The window size (number of variants).
    start : int, optional
        The index at which to start.
    stop : int, optional
        The index at which to stop.
    step : int, optional
        The number of variants between start positions of windows. If not
        given, defaults to the window size, i.e., non-overlapping windows.

    Returns
    -------
    hd : ndarray, float, shape (n_windows,)
        Haplotype diversity.
    """
    # Delegate the windowing to moving_statistic, applying
    # haplotype_diversity to each window of variants.
    window_args = dict(size=size, start=start, stop=stop, step=step)
    return moving_statistic(values=h, statistic=haplotype_diversity, **window_args)
constant[Estimate haplotype diversity in moving windows.
Parameters
----------
h : array_like, int, shape (n_variants, n_haplotypes)
Haplotype array.
size : int
The window size (number of variants).
start : int, optional
The index at which to start.
stop : int, optional
The index at which to stop.
step : int, optional
The number of variants between start positions of windows. If not
given, defaults to the window size, i.e., non-overlapping windows.
Returns
-------
hd : ndarray, float, shape (n_windows,)
Haplotype diversity.
]
variable[hd] assign[=] call[name[moving_statistic], parameter[]]
return[name[hd]] | keyword[def] identifier[moving_haplotype_diversity] ( identifier[h] , identifier[size] , identifier[start] = literal[int] , identifier[stop] = keyword[None] , identifier[step] = keyword[None] ):
literal[string]
identifier[hd] = identifier[moving_statistic] ( identifier[values] = identifier[h] , identifier[statistic] = identifier[haplotype_diversity] , identifier[size] = identifier[size] ,
identifier[start] = identifier[start] , identifier[stop] = identifier[stop] , identifier[step] = identifier[step] )
keyword[return] identifier[hd] | def moving_haplotype_diversity(h, size, start=0, stop=None, step=None):
"""Estimate haplotype diversity in moving windows.
Parameters
----------
h : array_like, int, shape (n_variants, n_haplotypes)
Haplotype array.
size : int
The window size (number of variants).
start : int, optional
The index at which to start.
stop : int, optional
The index at which to stop.
step : int, optional
The number of variants between start positions of windows. If not
given, defaults to the window size, i.e., non-overlapping windows.
Returns
-------
hd : ndarray, float, shape (n_windows,)
Haplotype diversity.
"""
hd = moving_statistic(values=h, statistic=haplotype_diversity, size=size, start=start, stop=stop, step=step)
return hd |
def read_h5ad(filename, backed: Optional[str] = None, chunk_size: int = 6000):
    """Read ``.h5ad``-formatted hdf5 file.

    Parameters
    ----------
    filename
        File name of data file.
    backed : {``None``, ``'r'``, ``'r+'``}
        If ``'r'``, load :class:`~anndata.AnnData` in ``backed`` mode instead
        of fully loading it into memory (`memory` mode). If you want to modify
        backed attributes of the AnnData object, you need to choose ``'r+'``.
    chunk_size
        Used only when loading sparse dataset that is stored as dense.
        Loading iterates through chunks of the dataset of this row size
        until it reads the whole dataset.
        Higher size means higher memory consumption and higher loading speed.
    """
    if isinstance(backed, bool):
        # Legacy boolean flag: True maps to "r+" (h5py's read/write mode),
        # False to in-memory loading.
        backed = 'r+' if backed else None
        warnings.warn(
            "In a future version, read_h5ad will no longer explicitly support "
            "boolean arguments. Specify the read mode, or leave `backed=None`.",
            DeprecationWarning,
        )
    if backed:
        # open in backed-mode
        return AnnData(filename=filename, filemode=backed)
    else:
        # load everything into memory
        constructor_args = _read_args_from_h5ad(filename=filename, chunk_size=chunk_size)
        X = constructor_args[0]
        dtype = None
        if X is not None:
            dtype = X.dtype.name  # maintain dtype, since 0.7
        # BUGFIX: reuse the already-parsed constructor args instead of
        # calling _read_args_from_h5ad a second time, which re-read the
        # whole file just to rebuild the same arguments.
        return AnnData(*constructor_args, dtype=dtype)
constant[Read ``.h5ad``-formatted hdf5 file.
Parameters
----------
filename
File name of data file.
backed : {``None``, ``'r'``, ``'r+'``}
If ``'r'``, load :class:`~anndata.AnnData` in ``backed`` mode instead
of fully loading it into memory (`memory` mode). If you want to modify
backed attributes of the AnnData object, you need to choose ``'r+'``.
chunk_size
Used only when loading sparse dataset that is stored as dense.
Loading iterates through chunks of the dataset of this row size
until it reads the whole dataset.
Higher size means higher memory consumption and higher loading speed.
]
if call[name[isinstance], parameter[name[backed], name[bool]]] begin[:]
variable[backed] assign[=] <ast.IfExp object at 0x7da20c991750>
call[name[warnings].warn, parameter[constant[In a future version, read_h5ad will no longer explicitly support boolean arguments. Specify the read mode, or leave `backed=None`.], name[DeprecationWarning]]]
if name[backed] begin[:]
return[call[name[AnnData], parameter[]]] | keyword[def] identifier[read_h5ad] ( identifier[filename] , identifier[backed] : identifier[Optional] [ identifier[str] ]= keyword[None] , identifier[chunk_size] : identifier[int] = literal[int] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[backed] , identifier[bool] ):
identifier[backed] = literal[string] keyword[if] identifier[backed] keyword[else] keyword[None]
identifier[warnings] . identifier[warn] (
literal[string]
literal[string] ,
identifier[DeprecationWarning] ,
)
keyword[if] identifier[backed] :
keyword[return] identifier[AnnData] ( identifier[filename] = identifier[filename] , identifier[filemode] = identifier[backed] )
keyword[else] :
identifier[constructor_args] = identifier[_read_args_from_h5ad] ( identifier[filename] = identifier[filename] , identifier[chunk_size] = identifier[chunk_size] )
identifier[X] = identifier[constructor_args] [ literal[int] ]
identifier[dtype] = keyword[None]
keyword[if] identifier[X] keyword[is] keyword[not] keyword[None] :
identifier[dtype] = identifier[X] . identifier[dtype] . identifier[name]
keyword[return] identifier[AnnData] (* identifier[_read_args_from_h5ad] ( identifier[filename] = identifier[filename] , identifier[chunk_size] = identifier[chunk_size] ), identifier[dtype] = identifier[dtype] ) | def read_h5ad(filename, backed: Optional[str]=None, chunk_size: int=6000):
"""Read ``.h5ad``-formatted hdf5 file.
Parameters
----------
filename
File name of data file.
backed : {``None``, ``'r'``, ``'r+'``}
If ``'r'``, load :class:`~anndata.AnnData` in ``backed`` mode instead
of fully loading it into memory (`memory` mode). If you want to modify
backed attributes of the AnnData object, you need to choose ``'r+'``.
chunk_size
Used only when loading sparse dataset that is stored as dense.
Loading iterates through chunks of the dataset of this row size
until it reads the whole dataset.
Higher size means higher memory consumption and higher loading speed.
"""
if isinstance(backed, bool):
# We pass `None`s through to h5py.File, and its default is “a”
# (=“r+”, but create the file if it doesn’t exist)
backed = 'r+' if backed else None
warnings.warn('In a future version, read_h5ad will no longer explicitly support boolean arguments. Specify the read mode, or leave `backed=None`.', DeprecationWarning) # depends on [control=['if'], data=[]]
if backed:
# open in backed-mode
return AnnData(filename=filename, filemode=backed) # depends on [control=['if'], data=[]]
else:
# load everything into memory
constructor_args = _read_args_from_h5ad(filename=filename, chunk_size=chunk_size)
X = constructor_args[0]
dtype = None
if X is not None:
dtype = X.dtype.name # maintain dtype, since 0.7 # depends on [control=['if'], data=['X']]
return AnnData(*_read_args_from_h5ad(filename=filename, chunk_size=chunk_size), dtype=dtype) |
def change(properties, feature, value=None):
    """ Returns a modified version of properties with all values of the
        given feature replaced by the given value.
        If 'value' is None the feature will be removed.
    """
    assert is_iterable_typed(properties, basestring)
    assert isinstance(feature, basestring)
    assert isinstance(value, (basestring, type(None)))
    feature = add_grist(feature)
    result = []
    for prop in properties:
        if get_grist(prop) != feature:
            # Unrelated feature: keep the property as-is.
            result.append(prop)
        elif value:
            # Matching feature: substitute the new value (dropped when
            # value is falsy, i.e. the feature is removed).
            result.append(replace_grist(value, feature))
    return result
constant[ Returns a modified version of properties with all values of the
given feature replaced by the given value.
If 'value' is None the feature will be removed.
]
assert[call[name[is_iterable_typed], parameter[name[properties], name[basestring]]]]
assert[call[name[isinstance], parameter[name[feature], name[basestring]]]]
assert[call[name[isinstance], parameter[name[value], tuple[[<ast.Name object at 0x7da1b20971f0>, <ast.Call object at 0x7da1b2097400>]]]]]
variable[result] assign[=] list[[]]
variable[feature] assign[=] call[name[add_grist], parameter[name[feature]]]
for taget[name[p]] in starred[name[properties]] begin[:]
if compare[call[name[get_grist], parameter[name[p]]] equal[==] name[feature]] begin[:]
if name[value] begin[:]
call[name[result].append, parameter[call[name[replace_grist], parameter[name[value], name[feature]]]]]
return[name[result]] | keyword[def] identifier[change] ( identifier[properties] , identifier[feature] , identifier[value] = keyword[None] ):
literal[string]
keyword[assert] identifier[is_iterable_typed] ( identifier[properties] , identifier[basestring] )
keyword[assert] identifier[isinstance] ( identifier[feature] , identifier[basestring] )
keyword[assert] identifier[isinstance] ( identifier[value] ,( identifier[basestring] , identifier[type] ( keyword[None] )))
identifier[result] =[]
identifier[feature] = identifier[add_grist] ( identifier[feature] )
keyword[for] identifier[p] keyword[in] identifier[properties] :
keyword[if] identifier[get_grist] ( identifier[p] )== identifier[feature] :
keyword[if] identifier[value] :
identifier[result] . identifier[append] ( identifier[replace_grist] ( identifier[value] , identifier[feature] ))
keyword[else] :
identifier[result] . identifier[append] ( identifier[p] )
keyword[return] identifier[result] | def change(properties, feature, value=None):
""" Returns a modified version of properties with all values of the
given feature replaced by the given value.
If 'value' is None the feature will be removed.
"""
assert is_iterable_typed(properties, basestring)
assert isinstance(feature, basestring)
assert isinstance(value, (basestring, type(None)))
result = []
feature = add_grist(feature)
for p in properties:
if get_grist(p) == feature:
if value:
result.append(replace_grist(value, feature)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['feature']]
else:
result.append(p) # depends on [control=['for'], data=['p']]
return result |
def to_yaml(template, clean_up=False, long_form=False):
    """
    Assume the input is JSON and convert to YAML
    """
    data = load_json(template)
    # Optionally normalise the parsed structure before serialising.
    return dump_yaml(clean(data) if clean_up else data, clean_up, long_form)
constant[
Assume the input is JSON and convert to YAML
]
variable[data] assign[=] call[name[load_json], parameter[name[template]]]
if name[clean_up] begin[:]
variable[data] assign[=] call[name[clean], parameter[name[data]]]
return[call[name[dump_yaml], parameter[name[data], name[clean_up], name[long_form]]]] | keyword[def] identifier[to_yaml] ( identifier[template] , identifier[clean_up] = keyword[False] , identifier[long_form] = keyword[False] ):
literal[string]
identifier[data] = identifier[load_json] ( identifier[template] )
keyword[if] identifier[clean_up] :
identifier[data] = identifier[clean] ( identifier[data] )
keyword[return] identifier[dump_yaml] ( identifier[data] , identifier[clean_up] , identifier[long_form] ) | def to_yaml(template, clean_up=False, long_form=False):
"""
Assume the input is JSON and convert to YAML
"""
data = load_json(template)
if clean_up:
data = clean(data) # depends on [control=['if'], data=[]]
return dump_yaml(data, clean_up, long_form) |
def gracefulShutdown(self):
    """Start shutting down"""
    perspective = self.bf.perspective
    if not perspective:
        # Nothing to negotiate with: stop the reactor immediately.
        log.msg("No active connection, shutting down NOW")
        reactor.stop()
        return
    log.msg(
        "Telling the master we want to shutdown after any running builds are finished")

    def _shutdownfailed(err):
        # Older masters (< 0.8.3) have no "shutdown" remote method, which
        # surfaces here as an AttributeError.
        if err.check(AttributeError):
            log.msg(
                "Master does not support worker initiated shutdown. Upgrade master to 0.8.3 or later to use this feature.")
        else:
            log.msg('callRemote("shutdown") failed')
            log.err(err)

    d = perspective.callRemote("shutdown")
    d.addErrback(_shutdownfailed)
    return d
constant[Start shutting down]
if <ast.UnaryOp object at 0x7da18eb56410> begin[:]
call[name[log].msg, parameter[constant[No active connection, shutting down NOW]]]
call[name[reactor].stop, parameter[]]
return[None]
call[name[log].msg, parameter[constant[Telling the master we want to shutdown after any running builds are finished]]]
variable[d] assign[=] call[name[self].bf.perspective.callRemote, parameter[constant[shutdown]]]
def function[_shutdownfailed, parameter[err]]:
if call[name[err].check, parameter[name[AttributeError]]] begin[:]
call[name[log].msg, parameter[constant[Master does not support worker initiated shutdown. Upgrade master to 0.8.3 or later to use this feature.]]]
call[name[d].addErrback, parameter[name[_shutdownfailed]]]
return[name[d]] | keyword[def] identifier[gracefulShutdown] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[bf] . identifier[perspective] :
identifier[log] . identifier[msg] ( literal[string] )
identifier[reactor] . identifier[stop] ()
keyword[return]
identifier[log] . identifier[msg] (
literal[string] )
identifier[d] = identifier[self] . identifier[bf] . identifier[perspective] . identifier[callRemote] ( literal[string] )
keyword[def] identifier[_shutdownfailed] ( identifier[err] ):
keyword[if] identifier[err] . identifier[check] ( identifier[AttributeError] ):
identifier[log] . identifier[msg] (
literal[string] )
keyword[else] :
identifier[log] . identifier[msg] ( literal[string] )
identifier[log] . identifier[err] ( identifier[err] )
identifier[d] . identifier[addErrback] ( identifier[_shutdownfailed] )
keyword[return] identifier[d] | def gracefulShutdown(self):
"""Start shutting down"""
if not self.bf.perspective:
log.msg('No active connection, shutting down NOW')
reactor.stop()
return # depends on [control=['if'], data=[]]
log.msg('Telling the master we want to shutdown after any running builds are finished')
d = self.bf.perspective.callRemote('shutdown')
def _shutdownfailed(err):
if err.check(AttributeError):
log.msg('Master does not support worker initiated shutdown. Upgrade master to 0.8.3 or later to use this feature.') # depends on [control=['if'], data=[]]
else:
log.msg('callRemote("shutdown") failed')
log.err(err)
d.addErrback(_shutdownfailed)
return d |
def make_input_stream():
    """Create a :py:class:`Queue` and a co-routine that yields from it.

    The queue accepts 2-tuples ``(command, message)`` with `command` one
    of [`msg`, `end`]. An `end` command terminates the co-routine and thus
    the stream; a `msg` command yields the encoded message as ``bytes``.
    Other commands are consumed silently.

    :return: tuple of (queue, stream)"""
    queue = Queue()

    def stream():
        while True:
            command, message = queue.get()
            if command == 'end':
                queue.task_done()
                return
            if command == 'msg':
                yield message.encode()
                # Acknowledge only after the consumer has resumed us.
                queue.task_done()
    return queue, stream
constant[Creates a :py:class:`Queue` object and a co-routine yielding from that
queue. The queue should be populated with 2-tuples of the form `(command,
message)`, where `command` is one of [`msg`, `end`].
When the `end` command is recieved, the co-routine returns, ending the
stream.
When a `msg` command is received, the accompanying message is encoded and
yielded as a ``bytes`` object.
:return: tuple of (queue, stream)]
variable[input_queue] assign[=] call[name[Queue], parameter[]]
def function[input_stream, parameter[]]:
while constant[True] begin[:]
<ast.Tuple object at 0x7da1b197ea70> assign[=] call[name[input_queue].get, parameter[]]
if compare[name[cmd] equal[==] constant[end]] begin[:]
call[name[input_queue].task_done, parameter[]]
return[None]
return[tuple[[<ast.Name object at 0x7da1b197c970>, <ast.Name object at 0x7da1b197eb00>]]] | keyword[def] identifier[make_input_stream] ():
literal[string]
identifier[input_queue] = identifier[Queue] ()
keyword[def] identifier[input_stream] ():
keyword[while] keyword[True] :
identifier[cmd] , identifier[msg] = identifier[input_queue] . identifier[get] ()
keyword[if] identifier[cmd] == literal[string] :
identifier[input_queue] . identifier[task_done] ()
keyword[return]
keyword[elif] identifier[cmd] == literal[string] :
keyword[yield] identifier[msg] . identifier[encode] ()
identifier[input_queue] . identifier[task_done] ()
keyword[return] identifier[input_queue] , identifier[input_stream] | def make_input_stream():
"""Creates a :py:class:`Queue` object and a co-routine yielding from that
queue. The queue should be populated with 2-tuples of the form `(command,
message)`, where `command` is one of [`msg`, `end`].
When the `end` command is recieved, the co-routine returns, ending the
stream.
When a `msg` command is received, the accompanying message is encoded and
yielded as a ``bytes`` object.
:return: tuple of (queue, stream)"""
input_queue = Queue()
def input_stream():
while True:
(cmd, msg) = input_queue.get()
if cmd == 'end':
input_queue.task_done()
return # depends on [control=['if'], data=[]]
elif cmd == 'msg':
yield msg.encode()
input_queue.task_done() # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
return (input_queue, input_stream) |
def JO(cpu, target):
    """
    Jumps short if overflow.
    :param cpu: current CPU.
    :param target: destination operand.
    """
    # Select the branch target when OF is set, otherwise fall through.
    destination = target.read()
    fallthrough = cpu.PC
    cpu.PC = Operators.ITEBV(cpu.address_bit_size, cpu.OF, destination, fallthrough)
constant[
Jumps short if overflow.
:param cpu: current CPU.
:param target: destination operand.
]
name[cpu].PC assign[=] call[name[Operators].ITEBV, parameter[name[cpu].address_bit_size, name[cpu].OF, call[name[target].read, parameter[]], name[cpu].PC]] | keyword[def] identifier[JO] ( identifier[cpu] , identifier[target] ):
literal[string]
identifier[cpu] . identifier[PC] = identifier[Operators] . identifier[ITEBV] ( identifier[cpu] . identifier[address_bit_size] , identifier[cpu] . identifier[OF] , identifier[target] . identifier[read] (), identifier[cpu] . identifier[PC] ) | def JO(cpu, target):
"""
Jumps short if overflow.
:param cpu: current CPU.
:param target: destination operand.
"""
cpu.PC = Operators.ITEBV(cpu.address_bit_size, cpu.OF, target.read(), cpu.PC) |
def contains(self, clr):
    """
    Return True if the given color is part of this color range.

    Accepts a single :class:`Color` or a list of colors; for a list,
    every color must fall inside the range.  Each h, s, b, a component
    is checked against the defined range for that component.  If a
    color is grayscale, it is checked against the definitions for
    black and white instead.  Any other input returns False.
    """
    # Normalize to a list of colors.  NOTE(fix): the previous version
    # rejected anything that was not a Color *before* the list check,
    # which made its list-handling loop unreachable dead code.
    if isinstance(clr, _list):
        colors = clr
    elif isinstance(clr, Color):
        colors = [clr]
    else:
        return False
    for c in colors:
        if c.is_grey and not self.grayscale:
            return (self.black.contains(c) or
                    self.white.contains(c))
        for r, v in [(self.h, c.h), (self.s, c.s),
                     (self.b, c.brightness), (self.a, c.a)]:
            # Each component range may be a list of (min, max) tuples,
            # a single tuple, or a scalar (treated as a point range).
            if isinstance(r, _list):
                pass
            elif isinstance(r, tuple):
                r = [r]
            else:
                r = [(r, r)]
            for lo, hi in r:
                if not (lo <= v <= hi):
                    return False
    return True
constant[
Returns True if the given color is part of this color range.
Check whether each h, s, b, a component of the color
falls within the defined range for that component.
If the given color is grayscale,
checks against the definitions for black and white.
]
if <ast.UnaryOp object at 0x7da1afff6aa0> begin[:]
return[constant[False]]
if <ast.UnaryOp object at 0x7da1afff6b00> begin[:]
variable[clr] assign[=] list[[<ast.Name object at 0x7da1afff45e0>]]
for taget[name[clr]] in starred[name[clr]] begin[:]
if <ast.BoolOp object at 0x7da1afff79d0> begin[:]
return[<ast.BoolOp object at 0x7da1afff62c0>]
for taget[tuple[[<ast.Name object at 0x7da1afff6140>, <ast.Name object at 0x7da1afff4460>]]] in starred[list[[<ast.Tuple object at 0x7da1afff42e0>, <ast.Tuple object at 0x7da1afff41f0>, <ast.Tuple object at 0x7da1afff4340>, <ast.Tuple object at 0x7da1afff46a0>]]] begin[:]
if call[name[isinstance], parameter[name[r], name[_list]]] begin[:]
pass
for taget[tuple[[<ast.Name object at 0x7da1afff4e80>, <ast.Name object at 0x7da1afff4ee0>]]] in starred[name[r]] begin[:]
if <ast.UnaryOp object at 0x7da1afff4bb0> begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[contains] ( identifier[self] , identifier[clr] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[clr] , identifier[Color] ):
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[isinstance] ( identifier[clr] , identifier[_list] ):
identifier[clr] =[ identifier[clr] ]
keyword[for] identifier[clr] keyword[in] identifier[clr] :
keyword[if] identifier[clr] . identifier[is_grey] keyword[and] keyword[not] identifier[self] . identifier[grayscale] :
keyword[return] ( identifier[self] . identifier[black] . identifier[contains] ( identifier[clr] ) keyword[or] identifier[self] . identifier[white] . identifier[contains] ( identifier[clr] ))
keyword[for] identifier[r] , identifier[v] keyword[in] [( identifier[self] . identifier[h] , identifier[clr] . identifier[h] ),( identifier[self] . identifier[s] , identifier[clr] . identifier[s] ),( identifier[self] . identifier[b] , identifier[clr] . identifier[brightness] ),( identifier[self] . identifier[a] , identifier[clr] . identifier[a] )]:
keyword[if] identifier[isinstance] ( identifier[r] , identifier[_list] ):
keyword[pass]
keyword[elif] identifier[isinstance] ( identifier[r] , identifier[tuple] ):
identifier[r] =[ identifier[r] ]
keyword[else] :
identifier[r] =[( identifier[r] , identifier[r] )]
keyword[for] identifier[min] , identifier[max] keyword[in] identifier[r] :
keyword[if] keyword[not] ( identifier[min] <= identifier[v] <= identifier[max] ):
keyword[return] keyword[False]
keyword[return] keyword[True] | def contains(self, clr):
"""
Returns True if the given color is part of this color range.
Check whether each h, s, b, a component of the color
falls within the defined range for that component.
If the given color is grayscale,
checks against the definitions for black and white.
"""
if not isinstance(clr, Color):
return False # depends on [control=['if'], data=[]]
if not isinstance(clr, _list):
clr = [clr] # depends on [control=['if'], data=[]]
for clr in clr:
if clr.is_grey and (not self.grayscale):
return self.black.contains(clr) or self.white.contains(clr) # depends on [control=['if'], data=[]]
for (r, v) in [(self.h, clr.h), (self.s, clr.s), (self.b, clr.brightness), (self.a, clr.a)]:
if isinstance(r, _list):
pass # depends on [control=['if'], data=[]]
elif isinstance(r, tuple):
r = [r] # depends on [control=['if'], data=[]]
else:
r = [(r, r)]
for (min, max) in r:
if not min <= v <= max:
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['clr']]
return True |
def _get_code(data, position, obj_end, opts, element_name):
    """Decode a BSON code element into a :class:`bson.code.Code`."""
    code_str, new_position = _get_string(data, position, obj_end, opts,
                                         element_name)
    return Code(code_str), new_position
constant[Decode a BSON code to bson.code.Code.]
<ast.Tuple object at 0x7da20c6a9ed0> assign[=] call[name[_get_string], parameter[name[data], name[position], name[obj_end], name[opts], name[element_name]]]
return[tuple[[<ast.Call object at 0x7da20c6aba30>, <ast.Name object at 0x7da20c6ab2e0>]]] | keyword[def] identifier[_get_code] ( identifier[data] , identifier[position] , identifier[obj_end] , identifier[opts] , identifier[element_name] ):
literal[string]
identifier[code] , identifier[position] = identifier[_get_string] ( identifier[data] , identifier[position] , identifier[obj_end] , identifier[opts] , identifier[element_name] )
keyword[return] identifier[Code] ( identifier[code] ), identifier[position] | def _get_code(data, position, obj_end, opts, element_name):
"""Decode a BSON code to bson.code.Code."""
(code, position) = _get_string(data, position, obj_end, opts, element_name)
return (Code(code), position) |
def _calcSkipRecords(numIngested, windowSize, learningPeriod):
"""Return the value of skipRecords for passing to estimateAnomalyLikelihoods
If `windowSize` is very large (bigger than the amount of data) then this
could just return `learningPeriod`. But when some values have fallen out of
the historical sliding window of anomaly records, then we have to take those
into account as well so we return the `learningPeriod` minus the number
shifted out.
:param numIngested - (int) number of data points that have been added to the
sliding window of historical data points.
:param windowSize - (int) size of sliding window of historical data points.
:param learningPeriod - (int) the number of iterations required for the
algorithm to learn the basic patterns in the dataset and for the anomaly
score to 'settle down'.
"""
numShiftedOut = max(0, numIngested - windowSize)
return min(numIngested, max(0, learningPeriod - numShiftedOut)) | def function[_calcSkipRecords, parameter[numIngested, windowSize, learningPeriod]]:
constant[Return the value of skipRecords for passing to estimateAnomalyLikelihoods
If `windowSize` is very large (bigger than the amount of data) then this
could just return `learningPeriod`. But when some values have fallen out of
the historical sliding window of anomaly records, then we have to take those
into account as well so we return the `learningPeriod` minus the number
shifted out.
:param numIngested - (int) number of data points that have been added to the
sliding window of historical data points.
:param windowSize - (int) size of sliding window of historical data points.
:param learningPeriod - (int) the number of iterations required for the
algorithm to learn the basic patterns in the dataset and for the anomaly
score to 'settle down'.
]
variable[numShiftedOut] assign[=] call[name[max], parameter[constant[0], binary_operation[name[numIngested] - name[windowSize]]]]
return[call[name[min], parameter[name[numIngested], call[name[max], parameter[constant[0], binary_operation[name[learningPeriod] - name[numShiftedOut]]]]]]] | keyword[def] identifier[_calcSkipRecords] ( identifier[numIngested] , identifier[windowSize] , identifier[learningPeriod] ):
literal[string]
identifier[numShiftedOut] = identifier[max] ( literal[int] , identifier[numIngested] - identifier[windowSize] )
keyword[return] identifier[min] ( identifier[numIngested] , identifier[max] ( literal[int] , identifier[learningPeriod] - identifier[numShiftedOut] )) | def _calcSkipRecords(numIngested, windowSize, learningPeriod):
"""Return the value of skipRecords for passing to estimateAnomalyLikelihoods
If `windowSize` is very large (bigger than the amount of data) then this
could just return `learningPeriod`. But when some values have fallen out of
the historical sliding window of anomaly records, then we have to take those
into account as well so we return the `learningPeriod` minus the number
shifted out.
:param numIngested - (int) number of data points that have been added to the
sliding window of historical data points.
:param windowSize - (int) size of sliding window of historical data points.
:param learningPeriod - (int) the number of iterations required for the
algorithm to learn the basic patterns in the dataset and for the anomaly
score to 'settle down'.
"""
numShiftedOut = max(0, numIngested - windowSize)
return min(numIngested, max(0, learningPeriod - numShiftedOut)) |
def lookup_job_tasks(self,
                     statuses,
                     user_ids=None,
                     job_ids=None,
                     job_names=None,
                     task_ids=None,
                     task_attempts=None,
                     labels=None,
                     create_time_min=None,
                     create_time_max=None,
                     max_tasks=0):
    """Return a list of operations. See base.py for additional detail."""

    def _normalize(values):
        # A bare {'*'} wildcard means "do not filter on this field".
        return None if values == {'*'} else values

    statuses = _normalize(statuses)
    user_ids = _normalize(user_ids)
    job_ids = _normalize(job_ids)
    job_names = _normalize(job_names)
    task_ids = _normalize(task_ids)
    task_attempts = _normalize(task_attempts)

    if labels or create_time_min or create_time_max:
        raise NotImplementedError(
            'Lookup by labels and create_time not yet supported by stub.')

    def _matches(op):
        # Each filter only applies when it is non-empty.
        if statuses and op.get_field('status', (None, None))[0] not in statuses:
            return False
        if user_ids and op.get_field('user', None) not in user_ids:
            return False
        if job_ids and op.get_field('job-id', None) not in job_ids:
            return False
        if job_names and op.get_field('job-name', None) not in job_names:
            return False
        if task_ids and op.get_field('task-id', None) not in task_ids:
            return False
        if task_attempts and op.get_field('task-attempt', None) not in task_attempts:
            return False
        return True

    matched = [op for op in self._operations if _matches(op)]
    return matched[:max_tasks] if max_tasks > 0 else matched
constant[Return a list of operations. See base.py for additional detail.]
variable[statuses] assign[=] <ast.IfExp object at 0x7da1b012c0d0>
variable[user_ids] assign[=] <ast.IfExp object at 0x7da1b012e860>
variable[job_ids] assign[=] <ast.IfExp object at 0x7da1b012f250>
variable[job_names] assign[=] <ast.IfExp object at 0x7da1b012caf0>
variable[task_ids] assign[=] <ast.IfExp object at 0x7da1b012d0f0>
variable[task_attempts] assign[=] <ast.IfExp object at 0x7da1b012e080>
if <ast.BoolOp object at 0x7da1b012f6a0> begin[:]
<ast.Raise object at 0x7da1b012c400>
variable[operations] assign[=] <ast.ListComp object at 0x7da1b012fc10>
if compare[name[max_tasks] greater[>] constant[0]] begin[:]
variable[operations] assign[=] call[name[operations]][<ast.Slice object at 0x7da1b012e0b0>]
return[name[operations]] | keyword[def] identifier[lookup_job_tasks] ( identifier[self] ,
identifier[statuses] ,
identifier[user_ids] = keyword[None] ,
identifier[job_ids] = keyword[None] ,
identifier[job_names] = keyword[None] ,
identifier[task_ids] = keyword[None] ,
identifier[task_attempts] = keyword[None] ,
identifier[labels] = keyword[None] ,
identifier[create_time_min] = keyword[None] ,
identifier[create_time_max] = keyword[None] ,
identifier[max_tasks] = literal[int] ):
literal[string]
identifier[statuses] = keyword[None] keyword[if] identifier[statuses] =={ literal[string] } keyword[else] identifier[statuses]
identifier[user_ids] = keyword[None] keyword[if] identifier[user_ids] =={ literal[string] } keyword[else] identifier[user_ids]
identifier[job_ids] = keyword[None] keyword[if] identifier[job_ids] =={ literal[string] } keyword[else] identifier[job_ids]
identifier[job_names] = keyword[None] keyword[if] identifier[job_names] =={ literal[string] } keyword[else] identifier[job_names]
identifier[task_ids] = keyword[None] keyword[if] identifier[task_ids] =={ literal[string] } keyword[else] identifier[task_ids]
identifier[task_attempts] = keyword[None] keyword[if] identifier[task_attempts] =={ literal[string] } keyword[else] identifier[task_attempts]
keyword[if] identifier[labels] keyword[or] identifier[create_time_min] keyword[or] identifier[create_time_max] :
keyword[raise] identifier[NotImplementedError] (
literal[string] )
identifier[operations] =[
identifier[x] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[_operations]
keyword[if] (( keyword[not] identifier[statuses] keyword[or] identifier[x] . identifier[get_field] ( literal[string] ,( keyword[None] , keyword[None] ))[ literal[int] ] keyword[in] identifier[statuses]
) keyword[and] ( keyword[not] identifier[user_ids] keyword[or] identifier[x] . identifier[get_field] ( literal[string] , keyword[None] ) keyword[in] identifier[user_ids] ) keyword[and]
( keyword[not] identifier[job_ids] keyword[or] identifier[x] . identifier[get_field] ( literal[string] , keyword[None] ) keyword[in] identifier[job_ids] ) keyword[and]
( keyword[not] identifier[job_names] keyword[or] identifier[x] . identifier[get_field] ( literal[string] , keyword[None] ) keyword[in] identifier[job_names] ) keyword[and]
( keyword[not] identifier[task_ids] keyword[or] identifier[x] . identifier[get_field] ( literal[string] , keyword[None] ) keyword[in] identifier[task_ids] ) keyword[and]
( keyword[not] identifier[task_attempts] keyword[or]
identifier[x] . identifier[get_field] ( literal[string] , keyword[None] ) keyword[in] identifier[task_attempts] ))
]
keyword[if] identifier[max_tasks] > literal[int] :
identifier[operations] = identifier[operations] [: identifier[max_tasks] ]
keyword[return] identifier[operations] | def lookup_job_tasks(self, statuses, user_ids=None, job_ids=None, job_names=None, task_ids=None, task_attempts=None, labels=None, create_time_min=None, create_time_max=None, max_tasks=0):
"""Return a list of operations. See base.py for additional detail."""
statuses = None if statuses == {'*'} else statuses
user_ids = None if user_ids == {'*'} else user_ids
job_ids = None if job_ids == {'*'} else job_ids
job_names = None if job_names == {'*'} else job_names
task_ids = None if task_ids == {'*'} else task_ids
task_attempts = None if task_attempts == {'*'} else task_attempts
if labels or create_time_min or create_time_max:
raise NotImplementedError('Lookup by labels and create_time not yet supported by stub.') # depends on [control=['if'], data=[]]
operations = [x for x in self._operations if (not statuses or x.get_field('status', (None, None))[0] in statuses) and (not user_ids or x.get_field('user', None) in user_ids) and (not job_ids or x.get_field('job-id', None) in job_ids) and (not job_names or x.get_field('job-name', None) in job_names) and (not task_ids or x.get_field('task-id', None) in task_ids) and (not task_attempts or x.get_field('task-attempt', None) in task_attempts)]
if max_tasks > 0:
operations = operations[:max_tasks] # depends on [control=['if'], data=['max_tasks']]
return operations |
def serve(name: str = "", port: int = 5000) -> None:
    """
    Serve the registered methods over HTTP until interrupted.

    Args:
        name: Address to bind to (empty string binds all interfaces).
        port: TCP port to listen on.
    """
    logging.info(" * Listening on port %s", port)
    server = HTTPServer((name, port), RequestHandler)
    server.serve_forever()
httpd.serve_forever() | def function[serve, parameter[name, port]]:
constant[
A basic way to serve the methods.
Args:
name: Server address.
port: Server port.
]
call[name[logging].info, parameter[constant[ * Listening on port %s], name[port]]]
variable[httpd] assign[=] call[name[HTTPServer], parameter[tuple[[<ast.Name object at 0x7da1b0774d60>, <ast.Name object at 0x7da1b0775f30>]], name[RequestHandler]]]
call[name[httpd].serve_forever, parameter[]] | keyword[def] identifier[serve] ( identifier[name] : identifier[str] = literal[string] , identifier[port] : identifier[int] = literal[int] )-> keyword[None] :
literal[string]
identifier[logging] . identifier[info] ( literal[string] , identifier[port] )
identifier[httpd] = identifier[HTTPServer] (( identifier[name] , identifier[port] ), identifier[RequestHandler] )
identifier[httpd] . identifier[serve_forever] () | def serve(name: str='', port: int=5000) -> None:
"""
A basic way to serve the methods.
Args:
name: Server address.
port: Server port.
"""
logging.info(' * Listening on port %s', port)
httpd = HTTPServer((name, port), RequestHandler)
httpd.serve_forever() |
def int_to_var_bytes(x):
    """Convert an integer to a Bitcoin variable-length integer (CompactSize)
    as a bytearray.

    Values below 0xfd are encoded in a single byte; larger values get a
    one-byte prefix (0xfd/0xfe/0xff) followed by the value in 2, 4, or 8
    little-endian bytes.

    :param x: the non-negative integer to convert
    :return: the encoded value as a ``bytearray``
    """
    # Uses the stdlib int.to_bytes with 'little' endianness instead of
    # reversing a big-endian buffer by hand, and returns a bytearray
    # from every branch (the old x < 253 branch returned bytes).
    if x < 0xfd:
        return bytearray([x])
    if x < 0x10000:
        return bytearray([0xfd]) + x.to_bytes(2, 'little')
    if x < 0x100000000:
        return bytearray([0xfe]) + x.to_bytes(4, 'little')
    return bytearray([0xff]) + x.to_bytes(8, 'little')
constant[Converts an integer to a bitcoin variable length integer as a bytearray
:param x: the integer to convert
]
if compare[name[x] less[<] constant[253]] begin[:]
return[call[name[intbytes].to_bytes, parameter[name[x], constant[1]]]] | keyword[def] identifier[int_to_var_bytes] ( identifier[x] ):
literal[string]
keyword[if] identifier[x] < literal[int] :
keyword[return] identifier[intbytes] . identifier[to_bytes] ( identifier[x] , literal[int] )
keyword[elif] identifier[x] < literal[int] :
keyword[return] identifier[bytearray] ([ literal[int] ])+ identifier[intbytes] . identifier[to_bytes] ( identifier[x] , literal[int] )[::- literal[int] ]
keyword[elif] identifier[x] < literal[int] :
keyword[return] identifier[bytearray] ([ literal[int] ])+ identifier[intbytes] . identifier[to_bytes] ( identifier[x] , literal[int] )[::- literal[int] ]
keyword[else] :
keyword[return] identifier[bytearray] ([ literal[int] ])+ identifier[intbytes] . identifier[to_bytes] ( identifier[x] , literal[int] )[::- literal[int] ] | def int_to_var_bytes(x):
"""Converts an integer to a bitcoin variable length integer as a bytearray
:param x: the integer to convert
"""
if x < 253:
return intbytes.to_bytes(x, 1) # depends on [control=['if'], data=['x']]
elif x < 65536:
return bytearray([253]) + intbytes.to_bytes(x, 2)[::-1] # depends on [control=['if'], data=['x']]
elif x < 4294967296:
return bytearray([254]) + intbytes.to_bytes(x, 4)[::-1] # depends on [control=['if'], data=['x']]
else:
return bytearray([255]) + intbytes.to_bytes(x, 8)[::-1] |
def _check_elements_equal(lst):
"""
Returns true if all of the elements in the list are equal.
"""
assert isinstance(lst, list), "Input value must be a list."
return not lst or lst.count(lst[0]) == len(lst) | def function[_check_elements_equal, parameter[lst]]:
constant[
Returns true if all of the elements in the list are equal.
]
assert[call[name[isinstance], parameter[name[lst], name[list]]]]
return[<ast.BoolOp object at 0x7da1b1f8ef50>] | keyword[def] identifier[_check_elements_equal] ( identifier[lst] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[lst] , identifier[list] ), literal[string]
keyword[return] keyword[not] identifier[lst] keyword[or] identifier[lst] . identifier[count] ( identifier[lst] [ literal[int] ])== identifier[len] ( identifier[lst] ) | def _check_elements_equal(lst):
"""
Returns true if all of the elements in the list are equal.
"""
assert isinstance(lst, list), 'Input value must be a list.'
return not lst or lst.count(lst[0]) == len(lst) |
def from_sphinx(cls, app):
    """Class method to create a :class:`Gallery` instance from the
    configuration of a sphinx application"""
    app.config.html_static_path.append(os.path.join(
        os.path.dirname(__file__), '_static'))
    config = app.config.example_gallery_config

    def _add_bokeh_resources(option, style_sheet, javascript):
        # Shared logic for the plain-bokeh and bokeh-widgets resources
        # (previously duplicated verbatim): a truthy non-string value
        # means "use the installed bokeh version".
        version = config.get(option)
        if not version:
            return
        if not isstring(version):
            import bokeh
            version = bokeh.__version__
        app.add_stylesheet(style_sheet.format(version=version))
        app.add_javascript(javascript.format(version=version))

    _add_bokeh_resources('insert_bokeh',
                         NotebookProcessor.BOKEH_STYLE_SHEET,
                         NotebookProcessor.BOKEH_JS)
    _add_bokeh_resources('insert_bokeh_widgets',
                         NotebookProcessor.BOKEH_WIDGETS_STYLE_SHEET,
                         NotebookProcessor.BOKEH_WIDGETS_JS)
    if not app.config.process_examples:
        return
    cls(**app.config.example_gallery_config).process_directories()
constant[Class method to create a :class:`Gallery` instance from the
configuration of a sphinx application]
call[name[app].config.html_static_path.append, parameter[call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], constant[_static]]]]]
variable[config] assign[=] name[app].config.example_gallery_config
variable[insert_bokeh] assign[=] call[name[config].get, parameter[constant[insert_bokeh]]]
if name[insert_bokeh] begin[:]
if <ast.UnaryOp object at 0x7da207f9a530> begin[:]
import module[bokeh]
variable[insert_bokeh] assign[=] name[bokeh].__version__
call[name[app].add_stylesheet, parameter[call[name[NotebookProcessor].BOKEH_STYLE_SHEET.format, parameter[]]]]
call[name[app].add_javascript, parameter[call[name[NotebookProcessor].BOKEH_JS.format, parameter[]]]]
variable[insert_bokeh_widgets] assign[=] call[name[config].get, parameter[constant[insert_bokeh_widgets]]]
if name[insert_bokeh_widgets] begin[:]
if <ast.UnaryOp object at 0x7da207f99c30> begin[:]
import module[bokeh]
variable[insert_bokeh_widgets] assign[=] name[bokeh].__version__
call[name[app].add_stylesheet, parameter[call[name[NotebookProcessor].BOKEH_WIDGETS_STYLE_SHEET.format, parameter[]]]]
call[name[app].add_javascript, parameter[call[name[NotebookProcessor].BOKEH_WIDGETS_JS.format, parameter[]]]]
if <ast.UnaryOp object at 0x7da20c6a9600> begin[:]
return[None]
call[call[name[cls], parameter[]].process_directories, parameter[]] | keyword[def] identifier[from_sphinx] ( identifier[cls] , identifier[app] ):
literal[string]
identifier[app] . identifier[config] . identifier[html_static_path] . identifier[append] ( identifier[os] . identifier[path] . identifier[join] (
identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), literal[string] ))
identifier[config] = identifier[app] . identifier[config] . identifier[example_gallery_config]
identifier[insert_bokeh] = identifier[config] . identifier[get] ( literal[string] )
keyword[if] identifier[insert_bokeh] :
keyword[if] keyword[not] identifier[isstring] ( identifier[insert_bokeh] ):
keyword[import] identifier[bokeh]
identifier[insert_bokeh] = identifier[bokeh] . identifier[__version__]
identifier[app] . identifier[add_stylesheet] (
identifier[NotebookProcessor] . identifier[BOKEH_STYLE_SHEET] . identifier[format] (
identifier[version] = identifier[insert_bokeh] ))
identifier[app] . identifier[add_javascript] (
identifier[NotebookProcessor] . identifier[BOKEH_JS] . identifier[format] ( identifier[version] = identifier[insert_bokeh] ))
identifier[insert_bokeh_widgets] = identifier[config] . identifier[get] ( literal[string] )
keyword[if] identifier[insert_bokeh_widgets] :
keyword[if] keyword[not] identifier[isstring] ( identifier[insert_bokeh_widgets] ):
keyword[import] identifier[bokeh]
identifier[insert_bokeh_widgets] = identifier[bokeh] . identifier[__version__]
identifier[app] . identifier[add_stylesheet] (
identifier[NotebookProcessor] . identifier[BOKEH_WIDGETS_STYLE_SHEET] . identifier[format] (
identifier[version] = identifier[insert_bokeh_widgets] ))
identifier[app] . identifier[add_javascript] (
identifier[NotebookProcessor] . identifier[BOKEH_WIDGETS_JS] . identifier[format] (
identifier[version] = identifier[insert_bokeh_widgets] ))
keyword[if] keyword[not] identifier[app] . identifier[config] . identifier[process_examples] :
keyword[return]
identifier[cls] (** identifier[app] . identifier[config] . identifier[example_gallery_config] ). identifier[process_directories] () | def from_sphinx(cls, app):
"""Class method to create a :class:`Gallery` instance from the
configuration of a sphinx application"""
app.config.html_static_path.append(os.path.join(os.path.dirname(__file__), '_static'))
config = app.config.example_gallery_config
insert_bokeh = config.get('insert_bokeh')
if insert_bokeh:
if not isstring(insert_bokeh):
import bokeh
insert_bokeh = bokeh.__version__ # depends on [control=['if'], data=[]]
app.add_stylesheet(NotebookProcessor.BOKEH_STYLE_SHEET.format(version=insert_bokeh))
app.add_javascript(NotebookProcessor.BOKEH_JS.format(version=insert_bokeh)) # depends on [control=['if'], data=[]]
insert_bokeh_widgets = config.get('insert_bokeh_widgets')
if insert_bokeh_widgets:
if not isstring(insert_bokeh_widgets):
import bokeh
insert_bokeh_widgets = bokeh.__version__ # depends on [control=['if'], data=[]]
app.add_stylesheet(NotebookProcessor.BOKEH_WIDGETS_STYLE_SHEET.format(version=insert_bokeh_widgets))
app.add_javascript(NotebookProcessor.BOKEH_WIDGETS_JS.format(version=insert_bokeh_widgets)) # depends on [control=['if'], data=[]]
if not app.config.process_examples:
return # depends on [control=['if'], data=[]]
cls(**app.config.example_gallery_config).process_directories() |
def AND(cpu, dest, src):
    """
    Logical AND.

    Performs a bitwise AND operation on the destination (first) and source
    (second) operands and stores the result in the destination operand location.
    Each bit of the result is set to 1 if both corresponding bits of the first and
    second operands are 1; otherwise, it is set to 0.

    The OF and CF flags are cleared; the SF, ZF, and PF flags are set according to the result::

            DEST = DEST AND SRC;

    :param cpu: current CPU.
    :param dest: destination operand.
    :param src: source operand.
    """
    # XXX bypass a capstone bug that incorrectly extends and computes operands sizes
    # the bug has been fixed since capstone 4.0.alpha2 (commit de8dd26)
    # 64-bit immediates are really 32-bit values sign-extended to 64 bits.
    if dest.size == 64 and src.size == 64 and src.type == 'immediate':
        operand = Operators.SEXTEND(src.read(), 32, 64)
    else:
        operand = src.read()
    result = dest.write(dest.read() & operand)
    # Defined Flags: szp
    cpu._calculate_logic_flags(dest.size, result)
constant[
Logical AND.
Performs a bitwise AND operation on the destination (first) and source
(second) operands and stores the result in the destination operand location.
Each bit of the result is set to 1 if both corresponding bits of the first and
second operands are 1; otherwise, it is set to 0.
The OF and CF flags are cleared; the SF, ZF, and PF flags are set according to the result::
DEST = DEST AND SRC;
:param cpu: current CPU.
:param dest: destination operand.
:param src: source operand.
]
if <ast.BoolOp object at 0x7da1b00865f0> begin[:]
variable[arg1] assign[=] call[name[Operators].SEXTEND, parameter[call[name[src].read, parameter[]], constant[32], constant[64]]]
variable[res] assign[=] call[name[dest].write, parameter[binary_operation[call[name[dest].read, parameter[]] <ast.BitAnd object at 0x7da2590d6b60> name[arg1]]]]
call[name[cpu]._calculate_logic_flags, parameter[name[dest].size, name[res]]] | keyword[def] identifier[AND] ( identifier[cpu] , identifier[dest] , identifier[src] ):
literal[string]
keyword[if] identifier[src] . identifier[size] == literal[int] keyword[and] identifier[src] . identifier[type] == literal[string] keyword[and] identifier[dest] . identifier[size] == literal[int] :
identifier[arg1] = identifier[Operators] . identifier[SEXTEND] ( identifier[src] . identifier[read] (), literal[int] , literal[int] )
keyword[else] :
identifier[arg1] = identifier[src] . identifier[read] ()
identifier[res] = identifier[dest] . identifier[write] ( identifier[dest] . identifier[read] ()& identifier[arg1] )
identifier[cpu] . identifier[_calculate_logic_flags] ( identifier[dest] . identifier[size] , identifier[res] ) | def AND(cpu, dest, src):
"""
Logical AND.
Performs a bitwise AND operation on the destination (first) and source
(second) operands and stores the result in the destination operand location.
Each bit of the result is set to 1 if both corresponding bits of the first and
second operands are 1; otherwise, it is set to 0.
The OF and CF flags are cleared; the SF, ZF, and PF flags are set according to the result::
DEST = DEST AND SRC;
:param cpu: current CPU.
:param dest: destination operand.
:param src: source operand.
"""
# XXX bypass a capstone bug that incorrectly extends and computes operands sizes
# the bug has been fixed since capstone 4.0.alpha2 (commit de8dd26)
if src.size == 64 and src.type == 'immediate' and (dest.size == 64):
arg1 = Operators.SEXTEND(src.read(), 32, 64) # depends on [control=['if'], data=[]]
else:
arg1 = src.read()
res = dest.write(dest.read() & arg1)
# Defined Flags: szp
cpu._calculate_logic_flags(dest.size, res) |
def query(self, dataset_key, query, query_type="sql", parameters=None):
"""Query an existing dataset
:param dataset_key: Dataset identifier, in the form of owner/id or of
a url
:type dataset_key: str
:param query: SQL or SPARQL query
:type query: str
:param query_type: The type of the query. Must be either 'sql' or
'sparql'. (Default value = "sql")
:type query_type: {'sql', 'sparql'}, optional
:param parameters: parameters to the query - if SPARQL query, this
should be a dict containing named parameters, if SQL query,then
this should be a list containing positional parameters.
Boolean values will be converted to xsd:boolean, Integer values to
xsd:integer, and other Numeric values to xsd:decimal. Anything
else is treated as a String literal (Default value = None)
:type parameters: query parameters, optional
:returns: Object containing the results of the query
:rtype: Results
:raises RuntimeError: If a server error occurs
"""
# TODO Move network request to RestApiClient
owner_id, dataset_id = parse_dataset_key(dataset_key)
params = {
"query": query
}
if parameters and query_type == "sparql":
# if SPARQL, then the parameters should be a Mapping containing
# named parameters
params["parameters"] = ",".join(
["{}={}".format(k, convert_to_sparql_literal(parameters[k]))
for k in parameters.keys()])
elif parameters and query_type == "sql":
# if SQL, then the parameters should be an array with positional
# parameters, need to unwind them to $data_world_paramN for each
# 0-indexed position N
parameters = {"$data_world_param{}".format(i): x
for i, x in enumerate(parameters)}
params["parameters"] = ",".join(["{}={}".format(
k, convert_to_sparql_literal(parameters[k]))
for k in parameters.keys()])
url = "{0}://{1}/{2}/{3}/{4}".format(self._protocol, self._query_host,
query_type, owner_id, dataset_id)
headers = {
'User-Agent': _user_agent(),
'Accept': 'application/sparql-results+json',
'Authorization': 'Bearer {0}'.format(self._config.auth_token)
}
response = requests.get(url, params=params, headers=headers)
if response.status_code == 200:
return QueryResults(response.json())
raise RuntimeError(
'Error executing query: {}'.format(response.content)) | def function[query, parameter[self, dataset_key, query, query_type, parameters]]:
constant[Query an existing dataset
:param dataset_key: Dataset identifier, in the form of owner/id or of
a url
:type dataset_key: str
:param query: SQL or SPARQL query
:type query: str
:param query_type: The type of the query. Must be either 'sql' or
'sparql'. (Default value = "sql")
:type query_type: {'sql', 'sparql'}, optional
:param parameters: parameters to the query - if SPARQL query, this
should be a dict containing named parameters, if SQL query,then
this should be a list containing positional parameters.
Boolean values will be converted to xsd:boolean, Integer values to
xsd:integer, and other Numeric values to xsd:decimal. Anything
else is treated as a String literal (Default value = None)
:type parameters: query parameters, optional
:returns: Object containing the results of the query
:rtype: Results
:raises RuntimeError: If a server error occurs
]
<ast.Tuple object at 0x7da204960190> assign[=] call[name[parse_dataset_key], parameter[name[dataset_key]]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da204960070>], [<ast.Name object at 0x7da204963c70>]]
if <ast.BoolOp object at 0x7da204961db0> begin[:]
call[name[params]][constant[parameters]] assign[=] call[constant[,].join, parameter[<ast.ListComp object at 0x7da204961c90>]]
variable[url] assign[=] call[constant[{0}://{1}/{2}/{3}/{4}].format, parameter[name[self]._protocol, name[self]._query_host, name[query_type], name[owner_id], name[dataset_id]]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da18ede5330>, <ast.Constant object at 0x7da18ede7220>, <ast.Constant object at 0x7da18ede6470>], [<ast.Call object at 0x7da18ede4460>, <ast.Constant object at 0x7da18ede4a90>, <ast.Call object at 0x7da18ede5f00>]]
variable[response] assign[=] call[name[requests].get, parameter[name[url]]]
if compare[name[response].status_code equal[==] constant[200]] begin[:]
return[call[name[QueryResults], parameter[call[name[response].json, parameter[]]]]]
<ast.Raise object at 0x7da18ede7b50> | keyword[def] identifier[query] ( identifier[self] , identifier[dataset_key] , identifier[query] , identifier[query_type] = literal[string] , identifier[parameters] = keyword[None] ):
literal[string]
identifier[owner_id] , identifier[dataset_id] = identifier[parse_dataset_key] ( identifier[dataset_key] )
identifier[params] ={
literal[string] : identifier[query]
}
keyword[if] identifier[parameters] keyword[and] identifier[query_type] == literal[string] :
identifier[params] [ literal[string] ]= literal[string] . identifier[join] (
[ literal[string] . identifier[format] ( identifier[k] , identifier[convert_to_sparql_literal] ( identifier[parameters] [ identifier[k] ]))
keyword[for] identifier[k] keyword[in] identifier[parameters] . identifier[keys] ()])
keyword[elif] identifier[parameters] keyword[and] identifier[query_type] == literal[string] :
identifier[parameters] ={ literal[string] . identifier[format] ( identifier[i] ): identifier[x]
keyword[for] identifier[i] , identifier[x] keyword[in] identifier[enumerate] ( identifier[parameters] )}
identifier[params] [ literal[string] ]= literal[string] . identifier[join] ([ literal[string] . identifier[format] (
identifier[k] , identifier[convert_to_sparql_literal] ( identifier[parameters] [ identifier[k] ]))
keyword[for] identifier[k] keyword[in] identifier[parameters] . identifier[keys] ()])
identifier[url] = literal[string] . identifier[format] ( identifier[self] . identifier[_protocol] , identifier[self] . identifier[_query_host] ,
identifier[query_type] , identifier[owner_id] , identifier[dataset_id] )
identifier[headers] ={
literal[string] : identifier[_user_agent] (),
literal[string] : literal[string] ,
literal[string] : literal[string] . identifier[format] ( identifier[self] . identifier[_config] . identifier[auth_token] )
}
identifier[response] = identifier[requests] . identifier[get] ( identifier[url] , identifier[params] = identifier[params] , identifier[headers] = identifier[headers] )
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
keyword[return] identifier[QueryResults] ( identifier[response] . identifier[json] ())
keyword[raise] identifier[RuntimeError] (
literal[string] . identifier[format] ( identifier[response] . identifier[content] )) | def query(self, dataset_key, query, query_type='sql', parameters=None):
"""Query an existing dataset
:param dataset_key: Dataset identifier, in the form of owner/id or of
a url
:type dataset_key: str
:param query: SQL or SPARQL query
:type query: str
:param query_type: The type of the query. Must be either 'sql' or
'sparql'. (Default value = "sql")
:type query_type: {'sql', 'sparql'}, optional
:param parameters: parameters to the query - if SPARQL query, this
should be a dict containing named parameters, if SQL query,then
this should be a list containing positional parameters.
Boolean values will be converted to xsd:boolean, Integer values to
xsd:integer, and other Numeric values to xsd:decimal. Anything
else is treated as a String literal (Default value = None)
:type parameters: query parameters, optional
:returns: Object containing the results of the query
:rtype: Results
:raises RuntimeError: If a server error occurs
"""
# TODO Move network request to RestApiClient
(owner_id, dataset_id) = parse_dataset_key(dataset_key)
params = {'query': query}
if parameters and query_type == 'sparql':
# if SPARQL, then the parameters should be a Mapping containing
# named parameters
params['parameters'] = ','.join(['{}={}'.format(k, convert_to_sparql_literal(parameters[k])) for k in parameters.keys()]) # depends on [control=['if'], data=[]]
elif parameters and query_type == 'sql':
# if SQL, then the parameters should be an array with positional
# parameters, need to unwind them to $data_world_paramN for each
# 0-indexed position N
parameters = {'$data_world_param{}'.format(i): x for (i, x) in enumerate(parameters)}
params['parameters'] = ','.join(['{}={}'.format(k, convert_to_sparql_literal(parameters[k])) for k in parameters.keys()]) # depends on [control=['if'], data=[]]
url = '{0}://{1}/{2}/{3}/{4}'.format(self._protocol, self._query_host, query_type, owner_id, dataset_id)
headers = {'User-Agent': _user_agent(), 'Accept': 'application/sparql-results+json', 'Authorization': 'Bearer {0}'.format(self._config.auth_token)}
response = requests.get(url, params=params, headers=headers)
if response.status_code == 200:
return QueryResults(response.json()) # depends on [control=['if'], data=[]]
raise RuntimeError('Error executing query: {}'.format(response.content)) |
def verify_contracts():
"""
Verify that the contracts are deployed correctly in the network.
:raise Exception: raise exception if the contracts are not deployed correctly.
"""
artifacts_path = ConfigProvider.get_config().keeper_path
logger.info(f'Keeper contract artifacts (JSON abi files) at: {artifacts_path}')
if os.environ.get('KEEPER_NETWORK_NAME'):
logger.warning(f'The `KEEPER_NETWORK_NAME` env var is set to '
f'{os.environ.get("KEEPER_NETWORK_NAME")}. '
f'This enables the user to override the method of how the network name '
f'is inferred from network id.')
# try to find contract with this network name
contract_name = Diagnostics.TEST_CONTRACT_NAME
network_id = Keeper.get_network_id()
network_name = Keeper.get_network_name(network_id)
logger.info(f'Using keeper contracts from network {network_name}, '
f'network id is {network_id}')
logger.info(f'Looking for keeper contracts ending with ".{network_name}.json", '
f'e.g. "{contract_name}.{network_name}.json".')
existing_contract_names = os.listdir(artifacts_path)
try:
ContractHandler.get(contract_name)
except Exception as e:
logger.error(e)
logger.error(f'Cannot find the keeper contracts. \n'
f'Current network id is {network_id} and network name is {network_name}.'
f'Expected to find contracts ending with ".{network_name}.json",'
f' e.g. "{contract_name}.{network_name}.json"')
raise OceanKeeperContractsNotFound(
f'Keeper contracts for keeper network {network_name} were not found '
f'in {artifacts_path}. \n'
f'Found the following contracts: \n\t{existing_contract_names}'
)
keeper = Keeper.get_instance()
contracts = [keeper.dispenser, keeper.token, keeper.did_registry,
keeper.agreement_manager, keeper.template_manager, keeper.condition_manager,
keeper.access_secret_store_condition, keeper.sign_condition,
keeper.lock_reward_condition, keeper.escrow_access_secretstore_template,
keeper.escrow_reward_condition, keeper.hash_lock_condition
]
addresses = '\n'.join([f'\t{c.name}: {c.address}' for c in contracts])
logging.info('Finished loading keeper contracts:\n'
'%s', addresses) | def function[verify_contracts, parameter[]]:
constant[
Verify that the contracts are deployed correctly in the network.
:raise Exception: raise exception if the contracts are not deployed correctly.
]
variable[artifacts_path] assign[=] call[name[ConfigProvider].get_config, parameter[]].keeper_path
call[name[logger].info, parameter[<ast.JoinedStr object at 0x7da18fe90730>]]
if call[name[os].environ.get, parameter[constant[KEEPER_NETWORK_NAME]]] begin[:]
call[name[logger].warning, parameter[<ast.JoinedStr object at 0x7da18fe93e20>]]
variable[contract_name] assign[=] name[Diagnostics].TEST_CONTRACT_NAME
variable[network_id] assign[=] call[name[Keeper].get_network_id, parameter[]]
variable[network_name] assign[=] call[name[Keeper].get_network_name, parameter[name[network_id]]]
call[name[logger].info, parameter[<ast.JoinedStr object at 0x7da18fe91cf0>]]
call[name[logger].info, parameter[<ast.JoinedStr object at 0x7da2047eba30>]]
variable[existing_contract_names] assign[=] call[name[os].listdir, parameter[name[artifacts_path]]]
<ast.Try object at 0x7da204623340>
variable[keeper] assign[=] call[name[Keeper].get_instance, parameter[]]
variable[contracts] assign[=] list[[<ast.Attribute object at 0x7da20c76d690>, <ast.Attribute object at 0x7da20c76dc00>, <ast.Attribute object at 0x7da20c76ff70>, <ast.Attribute object at 0x7da20c76f430>, <ast.Attribute object at 0x7da20c76e410>, <ast.Attribute object at 0x7da20c76d810>, <ast.Attribute object at 0x7da20c76d960>, <ast.Attribute object at 0x7da20c76cfa0>, <ast.Attribute object at 0x7da20c76d630>, <ast.Attribute object at 0x7da20c76ddb0>, <ast.Attribute object at 0x7da20c76f3d0>, <ast.Attribute object at 0x7da20c76d3f0>]]
variable[addresses] assign[=] call[constant[
].join, parameter[<ast.ListComp object at 0x7da20c76c970>]]
call[name[logging].info, parameter[constant[Finished loading keeper contracts:
%s], name[addresses]]] | keyword[def] identifier[verify_contracts] ():
literal[string]
identifier[artifacts_path] = identifier[ConfigProvider] . identifier[get_config] (). identifier[keeper_path]
identifier[logger] . identifier[info] ( literal[string] )
keyword[if] identifier[os] . identifier[environ] . identifier[get] ( literal[string] ):
identifier[logger] . identifier[warning] ( literal[string]
literal[string]
literal[string]
literal[string] )
identifier[contract_name] = identifier[Diagnostics] . identifier[TEST_CONTRACT_NAME]
identifier[network_id] = identifier[Keeper] . identifier[get_network_id] ()
identifier[network_name] = identifier[Keeper] . identifier[get_network_name] ( identifier[network_id] )
identifier[logger] . identifier[info] ( literal[string]
literal[string] )
identifier[logger] . identifier[info] ( literal[string]
literal[string] )
identifier[existing_contract_names] = identifier[os] . identifier[listdir] ( identifier[artifacts_path] )
keyword[try] :
identifier[ContractHandler] . identifier[get] ( identifier[contract_name] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[error] ( identifier[e] )
identifier[logger] . identifier[error] ( literal[string]
literal[string]
literal[string]
literal[string] )
keyword[raise] identifier[OceanKeeperContractsNotFound] (
literal[string]
literal[string]
literal[string]
)
identifier[keeper] = identifier[Keeper] . identifier[get_instance] ()
identifier[contracts] =[ identifier[keeper] . identifier[dispenser] , identifier[keeper] . identifier[token] , identifier[keeper] . identifier[did_registry] ,
identifier[keeper] . identifier[agreement_manager] , identifier[keeper] . identifier[template_manager] , identifier[keeper] . identifier[condition_manager] ,
identifier[keeper] . identifier[access_secret_store_condition] , identifier[keeper] . identifier[sign_condition] ,
identifier[keeper] . identifier[lock_reward_condition] , identifier[keeper] . identifier[escrow_access_secretstore_template] ,
identifier[keeper] . identifier[escrow_reward_condition] , identifier[keeper] . identifier[hash_lock_condition]
]
identifier[addresses] = literal[string] . identifier[join] ([ literal[string] keyword[for] identifier[c] keyword[in] identifier[contracts] ])
identifier[logging] . identifier[info] ( literal[string]
literal[string] , identifier[addresses] ) | def verify_contracts():
"""
Verify that the contracts are deployed correctly in the network.
:raise Exception: raise exception if the contracts are not deployed correctly.
"""
artifacts_path = ConfigProvider.get_config().keeper_path
logger.info(f'Keeper contract artifacts (JSON abi files) at: {artifacts_path}')
if os.environ.get('KEEPER_NETWORK_NAME'):
logger.warning(f"The `KEEPER_NETWORK_NAME` env var is set to {os.environ.get('KEEPER_NETWORK_NAME')}. This enables the user to override the method of how the network name is inferred from network id.") # depends on [control=['if'], data=[]]
# try to find contract with this network name
contract_name = Diagnostics.TEST_CONTRACT_NAME
network_id = Keeper.get_network_id()
network_name = Keeper.get_network_name(network_id)
logger.info(f'Using keeper contracts from network {network_name}, network id is {network_id}')
logger.info(f'Looking for keeper contracts ending with ".{network_name}.json", e.g. "{contract_name}.{network_name}.json".')
existing_contract_names = os.listdir(artifacts_path)
try:
ContractHandler.get(contract_name) # depends on [control=['try'], data=[]]
except Exception as e:
logger.error(e)
logger.error(f'Cannot find the keeper contracts. \nCurrent network id is {network_id} and network name is {network_name}.Expected to find contracts ending with ".{network_name}.json", e.g. "{contract_name}.{network_name}.json"')
raise OceanKeeperContractsNotFound(f'Keeper contracts for keeper network {network_name} were not found in {artifacts_path}. \nFound the following contracts: \n\t{existing_contract_names}') # depends on [control=['except'], data=['e']]
keeper = Keeper.get_instance()
contracts = [keeper.dispenser, keeper.token, keeper.did_registry, keeper.agreement_manager, keeper.template_manager, keeper.condition_manager, keeper.access_secret_store_condition, keeper.sign_condition, keeper.lock_reward_condition, keeper.escrow_access_secretstore_template, keeper.escrow_reward_condition, keeper.hash_lock_condition]
addresses = '\n'.join([f'\t{c.name}: {c.address}' for c in contracts])
logging.info('Finished loading keeper contracts:\n%s', addresses) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.