code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def fulfill(self, value):
"""
Fulfill the promise with a given value.
"""
assert self._state==self.PENDING
self._state=self.FULFILLED;
self.value = value
for callback in self._callbacks:
try:
callback(value)
except Exception:
# Ignore errors in callbacks
pass
# We will never call these callbacks again, so allow
# them to be garbage collected. This is important since
# they probably include closures which are binding variables
# that might otherwise be garbage collected.
self._callbacks = [] | def function[fulfill, parameter[self, value]]:
constant[
Fulfill the promise with a given value.
]
assert[compare[name[self]._state equal[==] name[self].PENDING]]
name[self]._state assign[=] name[self].FULFILLED
name[self].value assign[=] name[value]
for taget[name[callback]] in starred[name[self]._callbacks] begin[:]
<ast.Try object at 0x7da1b09676d0>
name[self]._callbacks assign[=] list[[]] | keyword[def] identifier[fulfill] ( identifier[self] , identifier[value] ):
literal[string]
keyword[assert] identifier[self] . identifier[_state] == identifier[self] . identifier[PENDING]
identifier[self] . identifier[_state] = identifier[self] . identifier[FULFILLED] ;
identifier[self] . identifier[value] = identifier[value]
keyword[for] identifier[callback] keyword[in] identifier[self] . identifier[_callbacks] :
keyword[try] :
identifier[callback] ( identifier[value] )
keyword[except] identifier[Exception] :
keyword[pass]
identifier[self] . identifier[_callbacks] =[] | def fulfill(self, value):
"""
Fulfill the promise with a given value.
"""
assert self._state == self.PENDING
self._state = self.FULFILLED
self.value = value
for callback in self._callbacks:
try:
callback(value) # depends on [control=['try'], data=[]]
except Exception:
# Ignore errors in callbacks
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['callback']]
# We will never call these callbacks again, so allow
# them to be garbage collected. This is important since
# they probably include closures which are binding variables
# that might otherwise be garbage collected.
self._callbacks = [] |
def _sqlalchemy_on_connection_close(self):
"""
Rollsback and closes the active session, since the client disconnected before the request
could be completed.
"""
if hasattr(self, "_db_conns"):
try:
for db_conn in self._db_conns.values():
db_conn.rollback()
except:
tornado.log.app_log.warning("Error occurred during database transaction cleanup: %s", str(sys.exc_info()[0]))
raise
finally:
for db_conn in self._db_conns.values():
try:
db_conn.close()
except:
tornado.log.app_log.warning("Error occurred when closing the database connection", exc_info=True) | def function[_sqlalchemy_on_connection_close, parameter[self]]:
constant[
Rollsback and closes the active session, since the client disconnected before the request
could be completed.
]
if call[name[hasattr], parameter[name[self], constant[_db_conns]]] begin[:]
<ast.Try object at 0x7da1b0bdb880> | keyword[def] identifier[_sqlalchemy_on_connection_close] ( identifier[self] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[try] :
keyword[for] identifier[db_conn] keyword[in] identifier[self] . identifier[_db_conns] . identifier[values] ():
identifier[db_conn] . identifier[rollback] ()
keyword[except] :
identifier[tornado] . identifier[log] . identifier[app_log] . identifier[warning] ( literal[string] , identifier[str] ( identifier[sys] . identifier[exc_info] ()[ literal[int] ]))
keyword[raise]
keyword[finally] :
keyword[for] identifier[db_conn] keyword[in] identifier[self] . identifier[_db_conns] . identifier[values] ():
keyword[try] :
identifier[db_conn] . identifier[close] ()
keyword[except] :
identifier[tornado] . identifier[log] . identifier[app_log] . identifier[warning] ( literal[string] , identifier[exc_info] = keyword[True] ) | def _sqlalchemy_on_connection_close(self):
"""
Rollsback and closes the active session, since the client disconnected before the request
could be completed.
"""
if hasattr(self, '_db_conns'):
try:
for db_conn in self._db_conns.values():
db_conn.rollback() # depends on [control=['for'], data=['db_conn']] # depends on [control=['try'], data=[]]
except:
tornado.log.app_log.warning('Error occurred during database transaction cleanup: %s', str(sys.exc_info()[0]))
raise # depends on [control=['except'], data=[]]
finally:
for db_conn in self._db_conns.values():
try:
db_conn.close() # depends on [control=['try'], data=[]]
except:
tornado.log.app_log.warning('Error occurred when closing the database connection', exc_info=True) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['db_conn']] # depends on [control=['if'], data=[]] |
def load(file_path, parse_line_fn):
"""Loads a text embedding into memory as a numpy matrix.
Args:
file_path: Path to the text embedding file.
parse_line_fn: callback function to parse each file line.
Returns:
A tuple of (list of vocabulary tokens, numpy matrix of embedding vectors).
Raises:
ValueError: if the data in the sstable is inconsistent.
"""
vocabulary = []
embeddings = []
embeddings_dim = None
for line in tf.gfile.GFile(file_path):
token, embedding = parse_line_fn(line)
if not embeddings_dim:
embeddings_dim = len(embedding)
elif embeddings_dim != len(embedding):
raise ValueError(
"Inconsistent embedding dimension detected, %d != %d for token %s",
embeddings_dim, len(embedding), token)
vocabulary.append(token)
embeddings.append(embedding)
return vocabulary, np.array(embeddings) | def function[load, parameter[file_path, parse_line_fn]]:
constant[Loads a text embedding into memory as a numpy matrix.
Args:
file_path: Path to the text embedding file.
parse_line_fn: callback function to parse each file line.
Returns:
A tuple of (list of vocabulary tokens, numpy matrix of embedding vectors).
Raises:
ValueError: if the data in the sstable is inconsistent.
]
variable[vocabulary] assign[=] list[[]]
variable[embeddings] assign[=] list[[]]
variable[embeddings_dim] assign[=] constant[None]
for taget[name[line]] in starred[call[name[tf].gfile.GFile, parameter[name[file_path]]]] begin[:]
<ast.Tuple object at 0x7da1b20ba890> assign[=] call[name[parse_line_fn], parameter[name[line]]]
if <ast.UnaryOp object at 0x7da1b20ba380> begin[:]
variable[embeddings_dim] assign[=] call[name[len], parameter[name[embedding]]]
call[name[vocabulary].append, parameter[name[token]]]
call[name[embeddings].append, parameter[name[embedding]]]
return[tuple[[<ast.Name object at 0x7da1b20bb4c0>, <ast.Call object at 0x7da1b20b9900>]]] | keyword[def] identifier[load] ( identifier[file_path] , identifier[parse_line_fn] ):
literal[string]
identifier[vocabulary] =[]
identifier[embeddings] =[]
identifier[embeddings_dim] = keyword[None]
keyword[for] identifier[line] keyword[in] identifier[tf] . identifier[gfile] . identifier[GFile] ( identifier[file_path] ):
identifier[token] , identifier[embedding] = identifier[parse_line_fn] ( identifier[line] )
keyword[if] keyword[not] identifier[embeddings_dim] :
identifier[embeddings_dim] = identifier[len] ( identifier[embedding] )
keyword[elif] identifier[embeddings_dim] != identifier[len] ( identifier[embedding] ):
keyword[raise] identifier[ValueError] (
literal[string] ,
identifier[embeddings_dim] , identifier[len] ( identifier[embedding] ), identifier[token] )
identifier[vocabulary] . identifier[append] ( identifier[token] )
identifier[embeddings] . identifier[append] ( identifier[embedding] )
keyword[return] identifier[vocabulary] , identifier[np] . identifier[array] ( identifier[embeddings] ) | def load(file_path, parse_line_fn):
"""Loads a text embedding into memory as a numpy matrix.
Args:
file_path: Path to the text embedding file.
parse_line_fn: callback function to parse each file line.
Returns:
A tuple of (list of vocabulary tokens, numpy matrix of embedding vectors).
Raises:
ValueError: if the data in the sstable is inconsistent.
"""
vocabulary = []
embeddings = []
embeddings_dim = None
for line in tf.gfile.GFile(file_path):
(token, embedding) = parse_line_fn(line)
if not embeddings_dim:
embeddings_dim = len(embedding) # depends on [control=['if'], data=[]]
elif embeddings_dim != len(embedding):
raise ValueError('Inconsistent embedding dimension detected, %d != %d for token %s', embeddings_dim, len(embedding), token) # depends on [control=['if'], data=['embeddings_dim']]
vocabulary.append(token)
embeddings.append(embedding) # depends on [control=['for'], data=['line']]
return (vocabulary, np.array(embeddings)) |
def geocode(
self,
query,
bbox=None,
mapview=None,
exactly_one=True,
maxresults=None,
pageinformation=None,
language=None,
additional_data=False,
timeout=DEFAULT_SENTINEL
):
"""
Return a location point by address.
This implementation supports only a subset of all available parameters.
A list of all parameters of the pure REST API is available here:
https://developer.here.com/documentation/geocoder/topics/resource-geocode.html
:param str query: The address or query you wish to geocode.
For a structured query, provide a dictionary whose keys
are one of: `city`, `county`, `district`, `country`, `state`,
`street`, `housenumber`, or `postalcode`.
:param bbox: A type of spatial filter, limits the search for any other attributes
in the request. Specified by two coordinate (lat/lon)
pairs -- corners of the box. `The bbox search is currently similar
to mapview but it is not extended` (cited from the REST API docs).
Relevant global results are also returned.
Example: ``[Point(22, 180), Point(-22, -180)]``.
:type bbox: list or tuple of 2 items of :class:`geopy.point.Point` or
``(latitude, longitude)`` or ``"%(latitude)s, %(longitude)s"``.
:param mapview: The app's viewport, given as two coordinate pairs, specified
by two lat/lon pairs -- corners of the bounding box,
respectively. Matches from within the set map view plus an extended area
are ranked highest. Relevant global results are also returned.
Example: ``[Point(22, 180), Point(-22, -180)]``.
:type mapview: list or tuple of 2 items of :class:`geopy.point.Point` or
``(latitude, longitude)`` or ``"%(latitude)s, %(longitude)s"``.
:param bool exactly_one: Return one result or a list of results, if
available.
:param int maxresults: Defines the maximum number of items in the
response structure. If not provided and there are multiple results
the HERE API will return 10 results by default. This will be reset
to one if ``exactly_one`` is True.
:param int pageinformation: A key which identifies the page to be returned
when the response is separated into multiple pages. Only useful when
``maxresults`` is also provided.
:param str language: Affects the language of the response,
must be a RFC 4647 language code, e.g. 'en-US'.
:param str additional_data: A string with key-value pairs as described on
https://developer.here.com/documentation/geocoder/topics/resource-params-additional.html.
These will be added as one query parameter to the URL.
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
exception. Set this only if you wish to override, on this call
only, the value set during the geocoder's initialization.
:rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
``exactly_one=False``.
"""
if isinstance(query, dict):
params = {
key: val
for key, val
in query.items()
if key in self.structured_query_params
}
params['app_id'] = self.app_id
params['app_code'] = self.app_code
else:
params = {
'searchtext': self.format_string % query,
'app_id': self.app_id,
'app_code': self.app_code
}
if bbox:
params['bbox'] = self._format_bounding_box(
bbox, "%(lat2)s,%(lon1)s;%(lat1)s,%(lon2)s")
if mapview:
params['mapview'] = self._format_bounding_box(
mapview, "%(lat2)s,%(lon1)s;%(lat1)s,%(lon2)s")
if pageinformation:
params['pageinformation'] = pageinformation
if maxresults:
params['maxresults'] = maxresults
if exactly_one:
params['maxresults'] = 1
if language:
params['language'] = language
if additional_data:
params['additionaldata'] = additional_data
url = "?".join((self.api, urlencode(params)))
logger.debug("%s.geocode: %s", self.__class__.__name__, url)
return self._parse_json(
self._call_geocoder(url, timeout=timeout),
exactly_one
) | def function[geocode, parameter[self, query, bbox, mapview, exactly_one, maxresults, pageinformation, language, additional_data, timeout]]:
constant[
Return a location point by address.
This implementation supports only a subset of all available parameters.
A list of all parameters of the pure REST API is available here:
https://developer.here.com/documentation/geocoder/topics/resource-geocode.html
:param str query: The address or query you wish to geocode.
For a structured query, provide a dictionary whose keys
are one of: `city`, `county`, `district`, `country`, `state`,
`street`, `housenumber`, or `postalcode`.
:param bbox: A type of spatial filter, limits the search for any other attributes
in the request. Specified by two coordinate (lat/lon)
pairs -- corners of the box. `The bbox search is currently similar
to mapview but it is not extended` (cited from the REST API docs).
Relevant global results are also returned.
Example: ``[Point(22, 180), Point(-22, -180)]``.
:type bbox: list or tuple of 2 items of :class:`geopy.point.Point` or
``(latitude, longitude)`` or ``"%(latitude)s, %(longitude)s"``.
:param mapview: The app's viewport, given as two coordinate pairs, specified
by two lat/lon pairs -- corners of the bounding box,
respectively. Matches from within the set map view plus an extended area
are ranked highest. Relevant global results are also returned.
Example: ``[Point(22, 180), Point(-22, -180)]``.
:type mapview: list or tuple of 2 items of :class:`geopy.point.Point` or
``(latitude, longitude)`` or ``"%(latitude)s, %(longitude)s"``.
:param bool exactly_one: Return one result or a list of results, if
available.
:param int maxresults: Defines the maximum number of items in the
response structure. If not provided and there are multiple results
the HERE API will return 10 results by default. This will be reset
to one if ``exactly_one`` is True.
:param int pageinformation: A key which identifies the page to be returned
when the response is separated into multiple pages. Only useful when
``maxresults`` is also provided.
:param str language: Affects the language of the response,
must be a RFC 4647 language code, e.g. 'en-US'.
:param str additional_data: A string with key-value pairs as described on
https://developer.here.com/documentation/geocoder/topics/resource-params-additional.html.
These will be added as one query parameter to the URL.
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
exception. Set this only if you wish to override, on this call
only, the value set during the geocoder's initialization.
:rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
``exactly_one=False``.
]
if call[name[isinstance], parameter[name[query], name[dict]]] begin[:]
variable[params] assign[=] <ast.DictComp object at 0x7da2044c30a0>
call[name[params]][constant[app_id]] assign[=] name[self].app_id
call[name[params]][constant[app_code]] assign[=] name[self].app_code
if name[bbox] begin[:]
call[name[params]][constant[bbox]] assign[=] call[name[self]._format_bounding_box, parameter[name[bbox], constant[%(lat2)s,%(lon1)s;%(lat1)s,%(lon2)s]]]
if name[mapview] begin[:]
call[name[params]][constant[mapview]] assign[=] call[name[self]._format_bounding_box, parameter[name[mapview], constant[%(lat2)s,%(lon1)s;%(lat1)s,%(lon2)s]]]
if name[pageinformation] begin[:]
call[name[params]][constant[pageinformation]] assign[=] name[pageinformation]
if name[maxresults] begin[:]
call[name[params]][constant[maxresults]] assign[=] name[maxresults]
if name[exactly_one] begin[:]
call[name[params]][constant[maxresults]] assign[=] constant[1]
if name[language] begin[:]
call[name[params]][constant[language]] assign[=] name[language]
if name[additional_data] begin[:]
call[name[params]][constant[additionaldata]] assign[=] name[additional_data]
variable[url] assign[=] call[constant[?].join, parameter[tuple[[<ast.Attribute object at 0x7da18f813fd0>, <ast.Call object at 0x7da18f813730>]]]]
call[name[logger].debug, parameter[constant[%s.geocode: %s], name[self].__class__.__name__, name[url]]]
return[call[name[self]._parse_json, parameter[call[name[self]._call_geocoder, parameter[name[url]]], name[exactly_one]]]] | keyword[def] identifier[geocode] (
identifier[self] ,
identifier[query] ,
identifier[bbox] = keyword[None] ,
identifier[mapview] = keyword[None] ,
identifier[exactly_one] = keyword[True] ,
identifier[maxresults] = keyword[None] ,
identifier[pageinformation] = keyword[None] ,
identifier[language] = keyword[None] ,
identifier[additional_data] = keyword[False] ,
identifier[timeout] = identifier[DEFAULT_SENTINEL]
):
literal[string]
keyword[if] identifier[isinstance] ( identifier[query] , identifier[dict] ):
identifier[params] ={
identifier[key] : identifier[val]
keyword[for] identifier[key] , identifier[val]
keyword[in] identifier[query] . identifier[items] ()
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[structured_query_params]
}
identifier[params] [ literal[string] ]= identifier[self] . identifier[app_id]
identifier[params] [ literal[string] ]= identifier[self] . identifier[app_code]
keyword[else] :
identifier[params] ={
literal[string] : identifier[self] . identifier[format_string] % identifier[query] ,
literal[string] : identifier[self] . identifier[app_id] ,
literal[string] : identifier[self] . identifier[app_code]
}
keyword[if] identifier[bbox] :
identifier[params] [ literal[string] ]= identifier[self] . identifier[_format_bounding_box] (
identifier[bbox] , literal[string] )
keyword[if] identifier[mapview] :
identifier[params] [ literal[string] ]= identifier[self] . identifier[_format_bounding_box] (
identifier[mapview] , literal[string] )
keyword[if] identifier[pageinformation] :
identifier[params] [ literal[string] ]= identifier[pageinformation]
keyword[if] identifier[maxresults] :
identifier[params] [ literal[string] ]= identifier[maxresults]
keyword[if] identifier[exactly_one] :
identifier[params] [ literal[string] ]= literal[int]
keyword[if] identifier[language] :
identifier[params] [ literal[string] ]= identifier[language]
keyword[if] identifier[additional_data] :
identifier[params] [ literal[string] ]= identifier[additional_data]
identifier[url] = literal[string] . identifier[join] (( identifier[self] . identifier[api] , identifier[urlencode] ( identifier[params] )))
identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[__class__] . identifier[__name__] , identifier[url] )
keyword[return] identifier[self] . identifier[_parse_json] (
identifier[self] . identifier[_call_geocoder] ( identifier[url] , identifier[timeout] = identifier[timeout] ),
identifier[exactly_one]
) | def geocode(self, query, bbox=None, mapview=None, exactly_one=True, maxresults=None, pageinformation=None, language=None, additional_data=False, timeout=DEFAULT_SENTINEL):
"""
Return a location point by address.
This implementation supports only a subset of all available parameters.
A list of all parameters of the pure REST API is available here:
https://developer.here.com/documentation/geocoder/topics/resource-geocode.html
:param str query: The address or query you wish to geocode.
For a structured query, provide a dictionary whose keys
are one of: `city`, `county`, `district`, `country`, `state`,
`street`, `housenumber`, or `postalcode`.
:param bbox: A type of spatial filter, limits the search for any other attributes
in the request. Specified by two coordinate (lat/lon)
pairs -- corners of the box. `The bbox search is currently similar
to mapview but it is not extended` (cited from the REST API docs).
Relevant global results are also returned.
Example: ``[Point(22, 180), Point(-22, -180)]``.
:type bbox: list or tuple of 2 items of :class:`geopy.point.Point` or
``(latitude, longitude)`` or ``"%(latitude)s, %(longitude)s"``.
:param mapview: The app's viewport, given as two coordinate pairs, specified
by two lat/lon pairs -- corners of the bounding box,
respectively. Matches from within the set map view plus an extended area
are ranked highest. Relevant global results are also returned.
Example: ``[Point(22, 180), Point(-22, -180)]``.
:type mapview: list or tuple of 2 items of :class:`geopy.point.Point` or
``(latitude, longitude)`` or ``"%(latitude)s, %(longitude)s"``.
:param bool exactly_one: Return one result or a list of results, if
available.
:param int maxresults: Defines the maximum number of items in the
response structure. If not provided and there are multiple results
the HERE API will return 10 results by default. This will be reset
to one if ``exactly_one`` is True.
:param int pageinformation: A key which identifies the page to be returned
when the response is separated into multiple pages. Only useful when
``maxresults`` is also provided.
:param str language: Affects the language of the response,
must be a RFC 4647 language code, e.g. 'en-US'.
:param str additional_data: A string with key-value pairs as described on
https://developer.here.com/documentation/geocoder/topics/resource-params-additional.html.
These will be added as one query parameter to the URL.
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
exception. Set this only if you wish to override, on this call
only, the value set during the geocoder's initialization.
:rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
``exactly_one=False``.
"""
if isinstance(query, dict):
params = {key: val for (key, val) in query.items() if key in self.structured_query_params}
params['app_id'] = self.app_id
params['app_code'] = self.app_code # depends on [control=['if'], data=[]]
else:
params = {'searchtext': self.format_string % query, 'app_id': self.app_id, 'app_code': self.app_code}
if bbox:
params['bbox'] = self._format_bounding_box(bbox, '%(lat2)s,%(lon1)s;%(lat1)s,%(lon2)s') # depends on [control=['if'], data=[]]
if mapview:
params['mapview'] = self._format_bounding_box(mapview, '%(lat2)s,%(lon1)s;%(lat1)s,%(lon2)s') # depends on [control=['if'], data=[]]
if pageinformation:
params['pageinformation'] = pageinformation # depends on [control=['if'], data=[]]
if maxresults:
params['maxresults'] = maxresults # depends on [control=['if'], data=[]]
if exactly_one:
params['maxresults'] = 1 # depends on [control=['if'], data=[]]
if language:
params['language'] = language # depends on [control=['if'], data=[]]
if additional_data:
params['additionaldata'] = additional_data # depends on [control=['if'], data=[]]
url = '?'.join((self.api, urlencode(params)))
logger.debug('%s.geocode: %s', self.__class__.__name__, url)
return self._parse_json(self._call_geocoder(url, timeout=timeout), exactly_one) |
def _delete_plot(cls, plot_id):
"""
Deletes registered plots and calls Plot.cleanup
"""
plot = cls._plots.get(plot_id)
if plot is None:
return
plot.cleanup()
del cls._plots[plot_id] | def function[_delete_plot, parameter[cls, plot_id]]:
constant[
Deletes registered plots and calls Plot.cleanup
]
variable[plot] assign[=] call[name[cls]._plots.get, parameter[name[plot_id]]]
if compare[name[plot] is constant[None]] begin[:]
return[None]
call[name[plot].cleanup, parameter[]]
<ast.Delete object at 0x7da18f58ec50> | keyword[def] identifier[_delete_plot] ( identifier[cls] , identifier[plot_id] ):
literal[string]
identifier[plot] = identifier[cls] . identifier[_plots] . identifier[get] ( identifier[plot_id] )
keyword[if] identifier[plot] keyword[is] keyword[None] :
keyword[return]
identifier[plot] . identifier[cleanup] ()
keyword[del] identifier[cls] . identifier[_plots] [ identifier[plot_id] ] | def _delete_plot(cls, plot_id):
"""
Deletes registered plots and calls Plot.cleanup
"""
plot = cls._plots.get(plot_id)
if plot is None:
return # depends on [control=['if'], data=[]]
plot.cleanup()
del cls._plots[plot_id] |
def encrypt_template(self, enc_key, mac_key, enc_offset):
"""
Encrypts current tpl_buf according to the protocol - symmetric encryption
:param enc_key:
:param mac_key:
:param enc_offset:
:return:
"""
# AES-256-CBC/PKCS7Padding
to_encrypt = self.tpl_buff[enc_offset:]
encrypted = aes_enc(enc_key, PKCS7.pad(to_encrypt))
# Mac the whole buffer
to_mac = PKCS7.pad(self.tpl_buff[:enc_offset] + encrypted)
mac = cbc_mac(mac_key, to_mac)
return to_mac + mac | def function[encrypt_template, parameter[self, enc_key, mac_key, enc_offset]]:
constant[
Encrypts current tpl_buf according to the protocol - symmetric encryption
:param enc_key:
:param mac_key:
:param enc_offset:
:return:
]
variable[to_encrypt] assign[=] call[name[self].tpl_buff][<ast.Slice object at 0x7da1b16035e0>]
variable[encrypted] assign[=] call[name[aes_enc], parameter[name[enc_key], call[name[PKCS7].pad, parameter[name[to_encrypt]]]]]
variable[to_mac] assign[=] call[name[PKCS7].pad, parameter[binary_operation[call[name[self].tpl_buff][<ast.Slice object at 0x7da1b16026b0>] + name[encrypted]]]]
variable[mac] assign[=] call[name[cbc_mac], parameter[name[mac_key], name[to_mac]]]
return[binary_operation[name[to_mac] + name[mac]]] | keyword[def] identifier[encrypt_template] ( identifier[self] , identifier[enc_key] , identifier[mac_key] , identifier[enc_offset] ):
literal[string]
identifier[to_encrypt] = identifier[self] . identifier[tpl_buff] [ identifier[enc_offset] :]
identifier[encrypted] = identifier[aes_enc] ( identifier[enc_key] , identifier[PKCS7] . identifier[pad] ( identifier[to_encrypt] ))
identifier[to_mac] = identifier[PKCS7] . identifier[pad] ( identifier[self] . identifier[tpl_buff] [: identifier[enc_offset] ]+ identifier[encrypted] )
identifier[mac] = identifier[cbc_mac] ( identifier[mac_key] , identifier[to_mac] )
keyword[return] identifier[to_mac] + identifier[mac] | def encrypt_template(self, enc_key, mac_key, enc_offset):
"""
Encrypts current tpl_buf according to the protocol - symmetric encryption
:param enc_key:
:param mac_key:
:param enc_offset:
:return:
"""
# AES-256-CBC/PKCS7Padding
to_encrypt = self.tpl_buff[enc_offset:]
encrypted = aes_enc(enc_key, PKCS7.pad(to_encrypt))
# Mac the whole buffer
to_mac = PKCS7.pad(self.tpl_buff[:enc_offset] + encrypted)
mac = cbc_mac(mac_key, to_mac)
return to_mac + mac |
def _all_same_area(self, dataset_ids):
"""Return True if all areas for the provided IDs are equal."""
all_areas = []
for ds_id in dataset_ids:
for scn in self.scenes:
ds = scn.get(ds_id)
if ds is None:
continue
all_areas.append(ds.attrs.get('area'))
all_areas = [area for area in all_areas if area is not None]
return all(all_areas[0] == area for area in all_areas[1:]) | def function[_all_same_area, parameter[self, dataset_ids]]:
constant[Return True if all areas for the provided IDs are equal.]
variable[all_areas] assign[=] list[[]]
for taget[name[ds_id]] in starred[name[dataset_ids]] begin[:]
for taget[name[scn]] in starred[name[self].scenes] begin[:]
variable[ds] assign[=] call[name[scn].get, parameter[name[ds_id]]]
if compare[name[ds] is constant[None]] begin[:]
continue
call[name[all_areas].append, parameter[call[name[ds].attrs.get, parameter[constant[area]]]]]
variable[all_areas] assign[=] <ast.ListComp object at 0x7da1b22fb250>
return[call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b22fb160>]]] | keyword[def] identifier[_all_same_area] ( identifier[self] , identifier[dataset_ids] ):
literal[string]
identifier[all_areas] =[]
keyword[for] identifier[ds_id] keyword[in] identifier[dataset_ids] :
keyword[for] identifier[scn] keyword[in] identifier[self] . identifier[scenes] :
identifier[ds] = identifier[scn] . identifier[get] ( identifier[ds_id] )
keyword[if] identifier[ds] keyword[is] keyword[None] :
keyword[continue]
identifier[all_areas] . identifier[append] ( identifier[ds] . identifier[attrs] . identifier[get] ( literal[string] ))
identifier[all_areas] =[ identifier[area] keyword[for] identifier[area] keyword[in] identifier[all_areas] keyword[if] identifier[area] keyword[is] keyword[not] keyword[None] ]
keyword[return] identifier[all] ( identifier[all_areas] [ literal[int] ]== identifier[area] keyword[for] identifier[area] keyword[in] identifier[all_areas] [ literal[int] :]) | def _all_same_area(self, dataset_ids):
"""Return True if all areas for the provided IDs are equal."""
all_areas = []
for ds_id in dataset_ids:
for scn in self.scenes:
ds = scn.get(ds_id)
if ds is None:
continue # depends on [control=['if'], data=[]]
all_areas.append(ds.attrs.get('area')) # depends on [control=['for'], data=['scn']] # depends on [control=['for'], data=['ds_id']]
all_areas = [area for area in all_areas if area is not None]
return all((all_areas[0] == area for area in all_areas[1:])) |
def check_if_single_file(client, fileshare, prefix, timeout=None):
# type: (azure.storage.file.FileService, str, str, int) ->
# Tuple[bool, azure.storage.file.models.File]
"""Check if prefix is a single file or multiple files
:param FileService client: blob client
:param str fileshare: file share name
:param str prefix: path prefix
:param int timeout: timeout
:rtype: tuple
:return: (if prefix in fileshare is a single file, file)
"""
if blobxfer.util.is_none_or_empty(prefix):
return (False, None)
file = get_file_properties(client, fileshare, prefix, timeout)
if file is None:
return (False, file)
else:
return (True, file) | def function[check_if_single_file, parameter[client, fileshare, prefix, timeout]]:
constant[Check if prefix is a single file or multiple files
:param FileService client: blob client
:param str fileshare: file share name
:param str prefix: path prefix
:param int timeout: timeout
:rtype: tuple
:return: (if prefix in fileshare is a single file, file)
]
if call[name[blobxfer].util.is_none_or_empty, parameter[name[prefix]]] begin[:]
return[tuple[[<ast.Constant object at 0x7da18eb55120>, <ast.Constant object at 0x7da18eb55750>]]]
variable[file] assign[=] call[name[get_file_properties], parameter[name[client], name[fileshare], name[prefix], name[timeout]]]
if compare[name[file] is constant[None]] begin[:]
return[tuple[[<ast.Constant object at 0x7da18eb54e80>, <ast.Name object at 0x7da18eb54160>]]] | keyword[def] identifier[check_if_single_file] ( identifier[client] , identifier[fileshare] , identifier[prefix] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[if] identifier[blobxfer] . identifier[util] . identifier[is_none_or_empty] ( identifier[prefix] ):
keyword[return] ( keyword[False] , keyword[None] )
identifier[file] = identifier[get_file_properties] ( identifier[client] , identifier[fileshare] , identifier[prefix] , identifier[timeout] )
keyword[if] identifier[file] keyword[is] keyword[None] :
keyword[return] ( keyword[False] , identifier[file] )
keyword[else] :
keyword[return] ( keyword[True] , identifier[file] ) | def check_if_single_file(client, fileshare, prefix, timeout=None):
# type: (azure.storage.file.FileService, str, str, int) ->
# Tuple[bool, azure.storage.file.models.File]
'Check if prefix is a single file or multiple files\n :param FileService client: blob client\n :param str fileshare: file share name\n :param str prefix: path prefix\n :param int timeout: timeout\n :rtype: tuple\n :return: (if prefix in fileshare is a single file, file)\n '
if blobxfer.util.is_none_or_empty(prefix):
return (False, None) # depends on [control=['if'], data=[]]
file = get_file_properties(client, fileshare, prefix, timeout)
if file is None:
return (False, file) # depends on [control=['if'], data=['file']]
else:
return (True, file) |
def com_google_fonts_check_name_fullfontname(ttFont,
style_with_spaces,
familyname_with_spaces):
""" Check name table: FULL_FONT_NAME entries. """
from fontbakery.utils import name_entry_id
failed = False
for name in ttFont['name'].names:
if name.nameID == NameID.FULL_FONT_NAME:
expected_value = "{} {}".format(familyname_with_spaces,
style_with_spaces)
string = name.string.decode(name.getEncoding()).strip()
if string != expected_value:
failed = True
# special case
# see https://github.com/googlefonts/fontbakery/issues/1436
if style_with_spaces == "Regular" \
and string == familyname_with_spaces:
yield WARN, ("Entry {} on the 'name' table:"
" Got '{}' which lacks 'Regular',"
" but it is probably OK in this case."
"").format(name_entry_id(name),
string)
else:
yield FAIL, ("Entry {} on the 'name' table: "
"Expected '{}' "
"but got '{}'.").format(name_entry_id(name),
expected_value,
string)
if not failed:
yield PASS, "FULL_FONT_NAME entries are all good." | def function[com_google_fonts_check_name_fullfontname, parameter[ttFont, style_with_spaces, familyname_with_spaces]]:
constant[ Check name table: FULL_FONT_NAME entries. ]
from relative_module[fontbakery.utils] import module[name_entry_id]
variable[failed] assign[=] constant[False]
for taget[name[name]] in starred[call[name[ttFont]][constant[name]].names] begin[:]
if compare[name[name].nameID equal[==] name[NameID].FULL_FONT_NAME] begin[:]
variable[expected_value] assign[=] call[constant[{} {}].format, parameter[name[familyname_with_spaces], name[style_with_spaces]]]
variable[string] assign[=] call[call[name[name].string.decode, parameter[call[name[name].getEncoding, parameter[]]]].strip, parameter[]]
if compare[name[string] not_equal[!=] name[expected_value]] begin[:]
variable[failed] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b12f3070> begin[:]
<ast.Yield object at 0x7da1b12f28c0>
if <ast.UnaryOp object at 0x7da1b1252860> begin[:]
<ast.Yield object at 0x7da1b1250970> | keyword[def] identifier[com_google_fonts_check_name_fullfontname] ( identifier[ttFont] ,
identifier[style_with_spaces] ,
identifier[familyname_with_spaces] ):
literal[string]
keyword[from] identifier[fontbakery] . identifier[utils] keyword[import] identifier[name_entry_id]
identifier[failed] = keyword[False]
keyword[for] identifier[name] keyword[in] identifier[ttFont] [ literal[string] ]. identifier[names] :
keyword[if] identifier[name] . identifier[nameID] == identifier[NameID] . identifier[FULL_FONT_NAME] :
identifier[expected_value] = literal[string] . identifier[format] ( identifier[familyname_with_spaces] ,
identifier[style_with_spaces] )
identifier[string] = identifier[name] . identifier[string] . identifier[decode] ( identifier[name] . identifier[getEncoding] ()). identifier[strip] ()
keyword[if] identifier[string] != identifier[expected_value] :
identifier[failed] = keyword[True]
keyword[if] identifier[style_with_spaces] == literal[string] keyword[and] identifier[string] == identifier[familyname_with_spaces] :
keyword[yield] identifier[WARN] ,( literal[string]
literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[name_entry_id] ( identifier[name] ),
identifier[string] )
keyword[else] :
keyword[yield] identifier[FAIL] ,( literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[name_entry_id] ( identifier[name] ),
identifier[expected_value] ,
identifier[string] )
keyword[if] keyword[not] identifier[failed] :
keyword[yield] identifier[PASS] , literal[string] | def com_google_fonts_check_name_fullfontname(ttFont, style_with_spaces, familyname_with_spaces):
""" Check name table: FULL_FONT_NAME entries. """
from fontbakery.utils import name_entry_id
failed = False
for name in ttFont['name'].names:
if name.nameID == NameID.FULL_FONT_NAME:
expected_value = '{} {}'.format(familyname_with_spaces, style_with_spaces)
string = name.string.decode(name.getEncoding()).strip()
if string != expected_value:
failed = True
# special case
# see https://github.com/googlefonts/fontbakery/issues/1436
if style_with_spaces == 'Regular' and string == familyname_with_spaces:
yield (WARN, "Entry {} on the 'name' table: Got '{}' which lacks 'Regular', but it is probably OK in this case.".format(name_entry_id(name), string)) # depends on [control=['if'], data=[]]
else:
yield (FAIL, "Entry {} on the 'name' table: Expected '{}' but got '{}'.".format(name_entry_id(name), expected_value, string)) # depends on [control=['if'], data=['string', 'expected_value']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
if not failed:
yield (PASS, 'FULL_FONT_NAME entries are all good.') # depends on [control=['if'], data=[]] |
def add_public_note(self, public_note, source=None):
    """Add a public note to the current record.
    :param public_note: public note for the current article.
    :type public_note: string
    :param source: source for the given notes.
    :type source: string
    """
    # Wrap the note in a sourced dict so its provenance is kept alongside
    # the value, then append it to the record's ``public_notes`` list.
    self._append_to('public_notes', self._sourced_dict(
        source,
        value=public_note,
)) | def function[add_public_note, parameter[self, public_note, source]]:
constant[Add public note.
:param public_note: public note for the current article.
:type public_note: string
:param source: source for the given notes.
:type source: string
]
call[name[self]._append_to, parameter[constant[public_notes], call[name[self]._sourced_dict, parameter[name[source]]]]] | keyword[def] identifier[add_public_note] ( identifier[self] , identifier[public_note] , identifier[source] = keyword[None] ):
literal[string]
identifier[self] . identifier[_append_to] ( literal[string] , identifier[self] . identifier[_sourced_dict] (
identifier[source] ,
identifier[value] = identifier[public_note] ,
)) | def add_public_note(self, public_note, source=None):
"""Add public note.
:param public_note: public note for the current article.
:type public_note: string
:param source: source for the given notes.
:type source: string
"""
self._append_to('public_notes', self._sourced_dict(source, value=public_note)) |
def extraction_to_conll(ex: Extraction) -> List[str]:
    """
    Return a conll representation of a given input Extraction.
    """
    ex = split_predicate(ex)
    # One '*' placeholder label per whitespace-separated token.
    ret = ['*'] * len(ex.sent.split(' '))
    # Pair each argument with an ARG<i> label, then append the relation
    # parts labelled by their element type.
    labelled = [("ARG{}".format(pos), arg)
                for pos, arg in enumerate([ex.arg1] + ex.args2)]
    labelled += [(part.elem_type, part) for part in ex.rel]
    for label, elem in labelled:
        start = char_to_word_index(elem.span[0], ex.sent)
        end = char_to_word_index(elem.span[1], ex.sent)
        # Open the bracket on the span's first word, close on its last.
        ret[start] = "({}{}".format(label, ret[start])
        ret[end] += ')'
return ret | def function[extraction_to_conll, parameter[ex]]:
constant[
Return a conll representation of a given input Extraction.
]
variable[ex] assign[=] call[name[split_predicate], parameter[name[ex]]]
variable[toks] assign[=] call[name[ex].sent.split, parameter[constant[ ]]]
variable[ret] assign[=] binary_operation[list[[<ast.Constant object at 0x7da20c990c70>]] * call[name[len], parameter[name[toks]]]]
variable[args] assign[=] binary_operation[list[[<ast.Attribute object at 0x7da20c993640>]] + name[ex].args2]
variable[rels_and_args] assign[=] binary_operation[<ast.ListComp object at 0x7da20c990670> + <ast.ListComp object at 0x7da20c993b80>]
for taget[tuple[[<ast.Name object at 0x7da20c9909d0>, <ast.Name object at 0x7da20c991510>]]] in starred[name[rels_and_args]] begin[:]
variable[cur_start_ind] assign[=] call[name[char_to_word_index], parameter[call[name[arg].span][constant[0]], name[ex].sent]]
variable[cur_end_ind] assign[=] call[name[char_to_word_index], parameter[call[name[arg].span][constant[1]], name[ex].sent]]
call[name[ret]][name[cur_start_ind]] assign[=] call[constant[({}{}].format, parameter[name[rel], call[name[ret]][name[cur_start_ind]]]]
<ast.AugAssign object at 0x7da20e956ec0>
return[name[ret]] | keyword[def] identifier[extraction_to_conll] ( identifier[ex] : identifier[Extraction] )-> identifier[List] [ identifier[str] ]:
literal[string]
identifier[ex] = identifier[split_predicate] ( identifier[ex] )
identifier[toks] = identifier[ex] . identifier[sent] . identifier[split] ( literal[string] )
identifier[ret] =[ literal[string] ]* identifier[len] ( identifier[toks] )
identifier[args] =[ identifier[ex] . identifier[arg1] ]+ identifier[ex] . identifier[args2]
identifier[rels_and_args] =[( literal[string] . identifier[format] ( identifier[arg_ind] ), identifier[arg] )
keyword[for] identifier[arg_ind] , identifier[arg] keyword[in] identifier[enumerate] ( identifier[args] )]+[( identifier[rel_part] . identifier[elem_type] , identifier[rel_part] )
keyword[for] identifier[rel_part]
keyword[in] identifier[ex] . identifier[rel] ]
keyword[for] identifier[rel] , identifier[arg] keyword[in] identifier[rels_and_args] :
identifier[cur_start_ind] = identifier[char_to_word_index] ( identifier[arg] . identifier[span] [ literal[int] ],
identifier[ex] . identifier[sent] )
identifier[cur_end_ind] = identifier[char_to_word_index] ( identifier[arg] . identifier[span] [ literal[int] ],
identifier[ex] . identifier[sent] )
identifier[ret] [ identifier[cur_start_ind] ]= literal[string] . identifier[format] ( identifier[rel] , identifier[ret] [ identifier[cur_start_ind] ])
identifier[ret] [ identifier[cur_end_ind] ]+= literal[string]
keyword[return] identifier[ret] | def extraction_to_conll(ex: Extraction) -> List[str]:
"""
Return a conll representation of a given input Extraction.
"""
ex = split_predicate(ex)
toks = ex.sent.split(' ')
ret = ['*'] * len(toks)
args = [ex.arg1] + ex.args2
rels_and_args = [('ARG{}'.format(arg_ind), arg) for (arg_ind, arg) in enumerate(args)] + [(rel_part.elem_type, rel_part) for rel_part in ex.rel]
for (rel, arg) in rels_and_args:
# Add brackets
cur_start_ind = char_to_word_index(arg.span[0], ex.sent)
cur_end_ind = char_to_word_index(arg.span[1], ex.sent)
ret[cur_start_ind] = '({}{}'.format(rel, ret[cur_start_ind])
ret[cur_end_ind] += ')' # depends on [control=['for'], data=[]]
return ret |
def specbits(self):
        """Returns the array of arguments that would be given to
        iptables for the current Rule.
        """
        def negatable(flag, value):
            # A leading '!' in the value denotes negation; it is emitted as a
            # separate token placed before the flag.
            m = re.match(r'^!\s*(.*)', value)
            if m:
                return ['!', flag, m.group(1)]
            return [flag, value]
        bits = []
        # Host/interface selectors, in iptables' conventional order.
        for flag, value in (('-p', self.protocol),
                            ('-i', self.in_interface),
                            ('-o', self.out_interface),
                            ('-s', self.source),
                            ('-d', self.destination)):
            if value:
                bits.extend(negatable(flag, value))
        # Match extensions contribute their own argument fragments.
        for match in self.matches:
            bits.extend(['-m', match.name()])
            bits.extend(match.specbits())
        # A goto (-g) takes precedence over a jump (-j) target.
        if self.goto:
            bits.extend(['-g', self.goto.name()])
            bits.extend(self.goto.specbits())
        elif self.jump:
            bits.extend(['-j', self.jump.name()])
            bits.extend(self.jump.specbits())
return bits | def function[specbits, parameter[self]]:
constant[Returns the array of arguments that would be given to
iptables for the current Rule.
]
def function[host_bits, parameter[opt, optval]]:
variable[m] assign[=] call[name[re].match, parameter[constant[^!\s*(.*)], name[optval]]]
if name[m] begin[:]
return[list[[<ast.Constant object at 0x7da1b28bf790>, <ast.Name object at 0x7da1b28bc340>, <ast.Call object at 0x7da1b28be710>]]]
variable[bits] assign[=] list[[]]
if name[self].protocol begin[:]
call[name[bits].extend, parameter[call[name[host_bits], parameter[constant[-p], name[self].protocol]]]]
if name[self].in_interface begin[:]
call[name[bits].extend, parameter[call[name[host_bits], parameter[constant[-i], name[self].in_interface]]]]
if name[self].out_interface begin[:]
call[name[bits].extend, parameter[call[name[host_bits], parameter[constant[-o], name[self].out_interface]]]]
if name[self].source begin[:]
call[name[bits].extend, parameter[call[name[host_bits], parameter[constant[-s], name[self].source]]]]
if name[self].destination begin[:]
call[name[bits].extend, parameter[call[name[host_bits], parameter[constant[-d], name[self].destination]]]]
for taget[name[mod]] in starred[name[self].matches] begin[:]
call[name[bits].extend, parameter[list[[<ast.Constant object at 0x7da1b2838220>, <ast.Call object at 0x7da1b2839c60>]]]]
call[name[bits].extend, parameter[call[name[mod].specbits, parameter[]]]]
if name[self].goto begin[:]
call[name[bits].extend, parameter[list[[<ast.Constant object at 0x7da1b2839510>, <ast.Call object at 0x7da1b2839e70>]]]]
call[name[bits].extend, parameter[call[name[self].goto.specbits, parameter[]]]]
return[name[bits]] | keyword[def] identifier[specbits] ( identifier[self] ):
literal[string]
keyword[def] identifier[host_bits] ( identifier[opt] , identifier[optval] ):
identifier[m] = identifier[re] . identifier[match] ( literal[string] , identifier[optval] )
keyword[if] identifier[m] :
keyword[return] [ literal[string] , identifier[opt] , identifier[m] . identifier[group] ( literal[int] )]
keyword[else] :
keyword[return] [ identifier[opt] , identifier[optval] ]
identifier[bits] =[]
keyword[if] identifier[self] . identifier[protocol] :
identifier[bits] . identifier[extend] ( identifier[host_bits] ( literal[string] , identifier[self] . identifier[protocol] ))
keyword[if] identifier[self] . identifier[in_interface] :
identifier[bits] . identifier[extend] ( identifier[host_bits] ( literal[string] , identifier[self] . identifier[in_interface] ))
keyword[if] identifier[self] . identifier[out_interface] :
identifier[bits] . identifier[extend] ( identifier[host_bits] ( literal[string] , identifier[self] . identifier[out_interface] ))
keyword[if] identifier[self] . identifier[source] :
identifier[bits] . identifier[extend] ( identifier[host_bits] ( literal[string] , identifier[self] . identifier[source] ))
keyword[if] identifier[self] . identifier[destination] :
identifier[bits] . identifier[extend] ( identifier[host_bits] ( literal[string] , identifier[self] . identifier[destination] ))
keyword[for] identifier[mod] keyword[in] identifier[self] . identifier[matches] :
identifier[bits] . identifier[extend] ([ literal[string] , identifier[mod] . identifier[name] ()])
identifier[bits] . identifier[extend] ( identifier[mod] . identifier[specbits] ())
keyword[if] identifier[self] . identifier[goto] :
identifier[bits] . identifier[extend] ([ literal[string] , identifier[self] . identifier[goto] . identifier[name] ()])
identifier[bits] . identifier[extend] ( identifier[self] . identifier[goto] . identifier[specbits] ())
keyword[elif] identifier[self] . identifier[jump] :
identifier[bits] . identifier[extend] ([ literal[string] , identifier[self] . identifier[jump] . identifier[name] ()])
identifier[bits] . identifier[extend] ( identifier[self] . identifier[jump] . identifier[specbits] ())
keyword[return] identifier[bits] | def specbits(self):
"""Returns the array of arguments that would be given to
iptables for the current Rule.
"""
def host_bits(opt, optval):
# handle the case where this is a negated value
m = re.match('^!\\s*(.*)', optval)
if m:
return ['!', opt, m.group(1)] # depends on [control=['if'], data=[]]
else:
return [opt, optval]
bits = []
if self.protocol:
bits.extend(host_bits('-p', self.protocol)) # depends on [control=['if'], data=[]]
if self.in_interface:
bits.extend(host_bits('-i', self.in_interface)) # depends on [control=['if'], data=[]]
if self.out_interface:
bits.extend(host_bits('-o', self.out_interface)) # depends on [control=['if'], data=[]]
if self.source:
bits.extend(host_bits('-s', self.source)) # depends on [control=['if'], data=[]]
if self.destination:
bits.extend(host_bits('-d', self.destination)) # depends on [control=['if'], data=[]]
for mod in self.matches:
bits.extend(['-m', mod.name()])
bits.extend(mod.specbits()) # depends on [control=['for'], data=['mod']]
if self.goto:
bits.extend(['-g', self.goto.name()])
bits.extend(self.goto.specbits()) # depends on [control=['if'], data=[]]
elif self.jump:
bits.extend(['-j', self.jump.name()])
bits.extend(self.jump.specbits()) # depends on [control=['if'], data=[]]
return bits |
def _validate_swagger_file(self):
        '''
        High level sanity check of the input swagger file, loosely based on
        https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md
        This is not a full schema compliance check: it only verifies that the
        parsed document (YAML or JSON) has no unknown top-level fields,
        carries the required fields, and declares a supported Swagger version.
        '''
        # Top-level keys must be known Swagger Object v2 fields or vendor
        # extensions.
        for key in self._cfg:
            if key in _Swagger.SWAGGER_OBJ_V2_FIELDS:
                continue
            if _Swagger.VENDOR_EXT_PATTERN.match(key):
                continue
            raise ValueError('Invalid Swagger Object Field: {0}'.format(key))
        # Fields required by the Saltstack boto apigateway state.
        for required in _Swagger.SWAGGER_OBJ_V2_FIELDS_REQUIRED:
            if required not in self._cfg:
                raise ValueError('Missing Swagger Object Field: {0}'.format(required))
        # The declared version must be one this module knows how to handle.
        self._swagger_version = self._cfg.get('swagger')
        if self._swagger_version not in _Swagger.SWAGGER_VERSIONS_SUPPORTED:
            raise ValueError('Unsupported Swagger version: {0},'
                             'Supported versions are {1}'.format(self._swagger_version,
                                                                 _Swagger.SWAGGER_VERSIONS_SUPPORTED))
        log.info(type(self._models))
self._validate_error_response_model(self.paths, self._models()) | def function[_validate_swagger_file, parameter[self]]:
constant[
High level check/validation of the input swagger file based on
https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md
This is not a full schema compliance check, but rather make sure that the input file (YAML or
JSON) can be read into a dictionary, and we check for the content of the Swagger Object for version
and info.
]
for taget[name[field]] in starred[name[self]._cfg] begin[:]
if <ast.BoolOp object at 0x7da1b21f0160> begin[:]
<ast.Raise object at 0x7da1b21f0f70>
for taget[name[field]] in starred[name[_Swagger].SWAGGER_OBJ_V2_FIELDS_REQUIRED] begin[:]
if compare[name[field] <ast.NotIn object at 0x7da2590d7190> name[self]._cfg] begin[:]
<ast.Raise object at 0x7da1b21f1b70>
name[self]._swagger_version assign[=] call[name[self]._cfg.get, parameter[constant[swagger]]]
if compare[name[self]._swagger_version <ast.NotIn object at 0x7da2590d7190> name[_Swagger].SWAGGER_VERSIONS_SUPPORTED] begin[:]
<ast.Raise object at 0x7da1b215ff40>
call[name[log].info, parameter[call[name[type], parameter[name[self]._models]]]]
call[name[self]._validate_error_response_model, parameter[name[self].paths, call[name[self]._models, parameter[]]]] | keyword[def] identifier[_validate_swagger_file] ( identifier[self] ):
literal[string]
keyword[for] identifier[field] keyword[in] identifier[self] . identifier[_cfg] :
keyword[if] ( identifier[field] keyword[not] keyword[in] identifier[_Swagger] . identifier[SWAGGER_OBJ_V2_FIELDS] keyword[and]
keyword[not] identifier[_Swagger] . identifier[VENDOR_EXT_PATTERN] . identifier[match] ( identifier[field] )):
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[field] ))
keyword[for] identifier[field] keyword[in] identifier[_Swagger] . identifier[SWAGGER_OBJ_V2_FIELDS_REQUIRED] :
keyword[if] identifier[field] keyword[not] keyword[in] identifier[self] . identifier[_cfg] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[field] ))
identifier[self] . identifier[_swagger_version] = identifier[self] . identifier[_cfg] . identifier[get] ( literal[string] )
keyword[if] identifier[self] . identifier[_swagger_version] keyword[not] keyword[in] identifier[_Swagger] . identifier[SWAGGER_VERSIONS_SUPPORTED] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[self] . identifier[_swagger_version] ,
identifier[_Swagger] . identifier[SWAGGER_VERSIONS_SUPPORTED] ))
identifier[log] . identifier[info] ( identifier[type] ( identifier[self] . identifier[_models] ))
identifier[self] . identifier[_validate_error_response_model] ( identifier[self] . identifier[paths] , identifier[self] . identifier[_models] ()) | def _validate_swagger_file(self):
"""
High level check/validation of the input swagger file based on
https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md
This is not a full schema compliance check, but rather make sure that the input file (YAML or
JSON) can be read into a dictionary, and we check for the content of the Swagger Object for version
and info.
"""
# check for any invalid fields for Swagger Object V2
for field in self._cfg:
if field not in _Swagger.SWAGGER_OBJ_V2_FIELDS and (not _Swagger.VENDOR_EXT_PATTERN.match(field)):
raise ValueError('Invalid Swagger Object Field: {0}'.format(field)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']]
# check for Required Swagger fields by Saltstack boto apigateway state
for field in _Swagger.SWAGGER_OBJ_V2_FIELDS_REQUIRED:
if field not in self._cfg:
raise ValueError('Missing Swagger Object Field: {0}'.format(field)) # depends on [control=['if'], data=['field']] # depends on [control=['for'], data=['field']]
# check for Swagger Version
self._swagger_version = self._cfg.get('swagger')
if self._swagger_version not in _Swagger.SWAGGER_VERSIONS_SUPPORTED:
raise ValueError('Unsupported Swagger version: {0},Supported versions are {1}'.format(self._swagger_version, _Swagger.SWAGGER_VERSIONS_SUPPORTED)) # depends on [control=['if'], data=[]]
log.info(type(self._models))
self._validate_error_response_model(self.paths, self._models()) |
def SetMaxCurrent(self, i):
        """Set the max output current.
        :param i: desired max current in amps; must be within [0, 8].
        :raises MonsoonError: if ``i`` is outside the accepted range.
        """
        if i < 0 or i > 8:
            raise MonsoonError(("Target max current %sA, is out of acceptable "
                                "range [0, 8].") % i)
        # Scale [0, 8] A onto an inverted 10-bit value: 0 A -> 1023, 8 A -> 0.
        val = 1023 - int((i / 8) * 1023)
        # Transmit the value one byte per packet, low byte first (register
        # 0x0a), then the high byte (register 0x0b).
        self._SendStruct("BBB", 0x01, 0x0a, val & 0xff)
self._SendStruct("BBB", 0x01, 0x0b, val >> 8) | def function[SetMaxCurrent, parameter[self, i]]:
constant[Set the max output current.
]
if <ast.BoolOp object at 0x7da1b086b520> begin[:]
<ast.Raise object at 0x7da1b0869570>
variable[val] assign[=] binary_operation[constant[1023] - call[name[int], parameter[binary_operation[binary_operation[name[i] / constant[8]] * constant[1023]]]]]
call[name[self]._SendStruct, parameter[constant[BBB], constant[1], constant[10], binary_operation[name[val] <ast.BitAnd object at 0x7da2590d6b60> constant[255]]]]
call[name[self]._SendStruct, parameter[constant[BBB], constant[1], constant[11], binary_operation[name[val] <ast.RShift object at 0x7da2590d6a40> constant[8]]]] | keyword[def] identifier[SetMaxCurrent] ( identifier[self] , identifier[i] ):
literal[string]
keyword[if] identifier[i] < literal[int] keyword[or] identifier[i] > literal[int] :
keyword[raise] identifier[MonsoonError] (( literal[string]
literal[string] )% identifier[i] )
identifier[val] = literal[int] - identifier[int] (( identifier[i] / literal[int] )* literal[int] )
identifier[self] . identifier[_SendStruct] ( literal[string] , literal[int] , literal[int] , identifier[val] & literal[int] )
identifier[self] . identifier[_SendStruct] ( literal[string] , literal[int] , literal[int] , identifier[val] >> literal[int] ) | def SetMaxCurrent(self, i):
"""Set the max output current.
"""
if i < 0 or i > 8:
raise MonsoonError('Target max current %sA, is out of acceptable range [0, 8].' % i) # depends on [control=['if'], data=[]]
val = 1023 - int(i / 8 * 1023)
self._SendStruct('BBB', 1, 10, val & 255)
self._SendStruct('BBB', 1, 11, val >> 8) |
def delete(self, remote_path, **kwargs):
        """Delete a single file or directory on the remote drive.
        Deleted entries are moved to the recycle bin (not counted against the
        user's quota) for 10 days, during which they can be restored to their
        original path; afterwards they are removed permanently.
        :param remote_path: path of the file/directory in the drive; must
            start with ``/apps/``.
            .. warning::
                * path length is limited to 1000;
                * the path must not contain ``\\\\ ? | " > < : *``;
                * no component may start or end with ``.`` or whitespace
                  (``\\r, \\n, \\t, space, \\0, \\x0B``).
        :type remote_path: str
        :return: Response object
        """
        data = {'path': remote_path}
return self._request('file', 'delete', data=data, **kwargs) | def function[delete, parameter[self, remote_path]]:
constant[删除单个文件或目录.
.. warning::
* 文件/目录删除后默认临时存放在回收站内,删除文件或目录的临时存放
不占用用户的空间配额;
* 存放有效期为10天,10天内可还原回原路径下,10天后则永久删除。
:param remote_path: 网盘中文件/目录的路径,路径必须以 /apps/ 开头。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\r, \n, \t, 空格, \0, \x0B`` 。
:type remote_path: str
:return: Response 对象
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b1034af0>], [<ast.Name object at 0x7da1b1037f40>]]
return[call[name[self]._request, parameter[constant[file], constant[delete]]]] | keyword[def] identifier[delete] ( identifier[self] , identifier[remote_path] ,** identifier[kwargs] ):
literal[string]
identifier[data] ={
literal[string] : identifier[remote_path]
}
keyword[return] identifier[self] . identifier[_request] ( literal[string] , literal[string] , identifier[data] = identifier[data] ,** identifier[kwargs] ) | def delete(self, remote_path, **kwargs):
"""删除单个文件或目录.
.. warning::
* 文件/目录删除后默认临时存放在回收站内,删除文件或目录的临时存放
不占用用户的空间配额;
* 存放有效期为10天,10天内可还原回原路径下,10天后则永久删除。
:param remote_path: 网盘中文件/目录的路径,路径必须以 /apps/ 开头。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:type remote_path: str
:return: Response 对象
"""
data = {'path': remote_path}
return self._request('file', 'delete', data=data, **kwargs) |
def _build_tarball(src_repo) -> str:
    """ Build a tarball from src and return the path to it """
    src_repo = Path(src_repo)
    run = partial(subprocess.run, cwd=src_repo, check=True)
    # Start from a pristine tree: drop untracked and ignored files.
    run(['git', 'clean', '-xdff'])
    # Initialise the vendored elasticsearch submodule when it is present.
    if (src_repo / 'es' / 'upstream').exists():
        run(['git', 'submodule', 'update', '--init', '--', 'es/upstream'])
    # Build the distribution tarball via gradle.
    run(['./gradlew', '--no-daemon', 'clean', 'distTar'])
    distributions = src_repo / 'app' / 'build' / 'distributions'
return next(distributions.glob('crate-*.tar.gz')) | def function[_build_tarball, parameter[src_repo]]:
constant[ Build a tarball from src and return the path to it ]
variable[run] assign[=] call[name[partial], parameter[name[subprocess].run]]
call[name[run], parameter[list[[<ast.Constant object at 0x7da20c7cab30>, <ast.Constant object at 0x7da20c7c8520>, <ast.Constant object at 0x7da20c7cb1c0>]]]]
variable[src_repo] assign[=] call[name[Path], parameter[name[src_repo]]]
if call[name[os].path.exists, parameter[binary_operation[binary_operation[name[src_repo] / constant[es]] / constant[upstream]]]] begin[:]
call[name[run], parameter[list[[<ast.Constant object at 0x7da20c7cac80>, <ast.Constant object at 0x7da20c7cae30>, <ast.Constant object at 0x7da20c7c9f30>, <ast.Constant object at 0x7da20c7ca830>, <ast.Constant object at 0x7da20c7cb370>, <ast.Constant object at 0x7da20c7cbd30>]]]]
call[name[run], parameter[list[[<ast.Constant object at 0x7da20c7c84c0>, <ast.Constant object at 0x7da20c7c8be0>, <ast.Constant object at 0x7da20c7c9870>, <ast.Constant object at 0x7da20c7c9a20>]]]]
variable[distributions] assign[=] binary_operation[binary_operation[binary_operation[call[name[Path], parameter[name[src_repo]]] / constant[app]] / constant[build]] / constant[distributions]]
return[call[name[next], parameter[call[name[distributions].glob, parameter[constant[crate-*.tar.gz]]]]]] | keyword[def] identifier[_build_tarball] ( identifier[src_repo] )-> identifier[str] :
literal[string]
identifier[run] = identifier[partial] ( identifier[subprocess] . identifier[run] , identifier[cwd] = identifier[src_repo] , identifier[check] = keyword[True] )
identifier[run] ([ literal[string] , literal[string] , literal[string] ])
identifier[src_repo] = identifier[Path] ( identifier[src_repo] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[src_repo] / literal[string] / literal[string] ):
identifier[run] ([ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ])
identifier[run] ([ literal[string] , literal[string] , literal[string] , literal[string] ])
identifier[distributions] = identifier[Path] ( identifier[src_repo] )/ literal[string] / literal[string] / literal[string]
keyword[return] identifier[next] ( identifier[distributions] . identifier[glob] ( literal[string] )) | def _build_tarball(src_repo) -> str:
""" Build a tarball from src and return the path to it """
run = partial(subprocess.run, cwd=src_repo, check=True)
run(['git', 'clean', '-xdff'])
src_repo = Path(src_repo)
if os.path.exists(src_repo / 'es' / 'upstream'):
run(['git', 'submodule', 'update', '--init', '--', 'es/upstream']) # depends on [control=['if'], data=[]]
run(['./gradlew', '--no-daemon', 'clean', 'distTar'])
distributions = Path(src_repo) / 'app' / 'build' / 'distributions'
return next(distributions.glob('crate-*.tar.gz')) |
def get_identity(self, keygrip):
        """
        Returns device.interface.Identity that matches specified keygrip.
        In case of missing keygrip, KeyError will be raised.
        """
        grip = binascii.unhexlify(keygrip)
        pubkey_dict, user_ids = decode.load_by_keygrip(
            pubkey_bytes=self.pubkey_bytes, keygrip=grip)
        # The first user ID is assumed to be the one used to generate
        # TREZOR-based GPG keys.
        user_id = user_ids[0]['value'].decode('utf-8')
        curve_name = protocol.get_curve_name_by_oid(pubkey_dict['curve_oid'])
        ecdh = (pubkey_dict['algo'] == protocol.ECDH_ALGO_ID)
        identity = client.create_identity(user_id=user_id, curve_name=curve_name)
        verifying_key = self.client.pubkey(identity=identity, ecdh=ecdh)
        pubkey = protocol.PublicKey(
            curve_name=curve_name, created=pubkey_dict['created'],
            verifying_key=verifying_key, ecdh=ecdh)
        # Sanity checks: the reconstructed key must match the looked-up entry.
        assert pubkey.key_id() == pubkey_dict['key_id']
        assert pubkey.keygrip() == grip
return identity | def function[get_identity, parameter[self, keygrip]]:
constant[
Returns device.interface.Identity that matches specified keygrip.
In case of missing keygrip, KeyError will be raised.
]
variable[keygrip_bytes] assign[=] call[name[binascii].unhexlify, parameter[name[keygrip]]]
<ast.Tuple object at 0x7da18f00d720> assign[=] call[name[decode].load_by_keygrip, parameter[]]
variable[user_id] assign[=] call[call[call[name[user_ids]][constant[0]]][constant[value]].decode, parameter[constant[utf-8]]]
variable[curve_name] assign[=] call[name[protocol].get_curve_name_by_oid, parameter[call[name[pubkey_dict]][constant[curve_oid]]]]
variable[ecdh] assign[=] compare[call[name[pubkey_dict]][constant[algo]] equal[==] name[protocol].ECDH_ALGO_ID]
variable[identity] assign[=] call[name[client].create_identity, parameter[]]
variable[verifying_key] assign[=] call[name[self].client.pubkey, parameter[]]
variable[pubkey] assign[=] call[name[protocol].PublicKey, parameter[]]
assert[compare[call[name[pubkey].key_id, parameter[]] equal[==] call[name[pubkey_dict]][constant[key_id]]]]
assert[compare[call[name[pubkey].keygrip, parameter[]] equal[==] name[keygrip_bytes]]]
return[name[identity]] | keyword[def] identifier[get_identity] ( identifier[self] , identifier[keygrip] ):
literal[string]
identifier[keygrip_bytes] = identifier[binascii] . identifier[unhexlify] ( identifier[keygrip] )
identifier[pubkey_dict] , identifier[user_ids] = identifier[decode] . identifier[load_by_keygrip] (
identifier[pubkey_bytes] = identifier[self] . identifier[pubkey_bytes] , identifier[keygrip] = identifier[keygrip_bytes] )
identifier[user_id] = identifier[user_ids] [ literal[int] ][ literal[string] ]. identifier[decode] ( literal[string] )
identifier[curve_name] = identifier[protocol] . identifier[get_curve_name_by_oid] ( identifier[pubkey_dict] [ literal[string] ])
identifier[ecdh] =( identifier[pubkey_dict] [ literal[string] ]== identifier[protocol] . identifier[ECDH_ALGO_ID] )
identifier[identity] = identifier[client] . identifier[create_identity] ( identifier[user_id] = identifier[user_id] , identifier[curve_name] = identifier[curve_name] )
identifier[verifying_key] = identifier[self] . identifier[client] . identifier[pubkey] ( identifier[identity] = identifier[identity] , identifier[ecdh] = identifier[ecdh] )
identifier[pubkey] = identifier[protocol] . identifier[PublicKey] (
identifier[curve_name] = identifier[curve_name] , identifier[created] = identifier[pubkey_dict] [ literal[string] ],
identifier[verifying_key] = identifier[verifying_key] , identifier[ecdh] = identifier[ecdh] )
keyword[assert] identifier[pubkey] . identifier[key_id] ()== identifier[pubkey_dict] [ literal[string] ]
keyword[assert] identifier[pubkey] . identifier[keygrip] ()== identifier[keygrip_bytes]
keyword[return] identifier[identity] | def get_identity(self, keygrip):
"""
Returns device.interface.Identity that matches specified keygrip.
In case of missing keygrip, KeyError will be raised.
"""
keygrip_bytes = binascii.unhexlify(keygrip)
(pubkey_dict, user_ids) = decode.load_by_keygrip(pubkey_bytes=self.pubkey_bytes, keygrip=keygrip_bytes)
# We assume the first user ID is used to generate TREZOR-based GPG keys.
user_id = user_ids[0]['value'].decode('utf-8')
curve_name = protocol.get_curve_name_by_oid(pubkey_dict['curve_oid'])
ecdh = pubkey_dict['algo'] == protocol.ECDH_ALGO_ID
identity = client.create_identity(user_id=user_id, curve_name=curve_name)
verifying_key = self.client.pubkey(identity=identity, ecdh=ecdh)
pubkey = protocol.PublicKey(curve_name=curve_name, created=pubkey_dict['created'], verifying_key=verifying_key, ecdh=ecdh)
assert pubkey.key_id() == pubkey_dict['key_id']
assert pubkey.keygrip() == keygrip_bytes
return identity |
def QA_fetch_get_future_transaction_realtime(code, ip=None, port=None):
    '''Fetch historical tick-by-tick transaction data (期货历史成交分笔)
    for a futures contract from a TDX extension-market server.

    :param code: futures contract code to query
    :param ip: server IP; resolved via get_extensionmarket_ip when None
    :param port: server port; resolved via get_extensionmarket_ip when None
    :return: pandas DataFrame of ticks, indexed by datetime (non-destructively)
    '''
    ip, port = get_extensionmarket_ip(ip, port)
    apix = TdxExHq_API()
    # Lazily populate the module-level market-list cache on first use.
    global extension_market_list
    extension_market_list = QA_fetch_get_extensionmarket_list(
    ) if extension_market_list is None else extension_market_list
    # Look up which market this contract code belongs to (first match wins).
    code_market = extension_market_list.query(
        'code=="{}"'.format(code)).iloc[0]
    with apix.connect(ip, port):
        data = pd.DataFrame()
        # Pull 31 pages of ticks ((30 - i) * 1800 is the server-side page
        # offset, so pages arrive oldest-first) and stack them row-wise.
        data = pd.concat([apix.to_df(apix.get_transaction_data(
            int(code_market.market), code, (30 - i) * 1800)) for i in range(31)], axis=0)
        # NOTE(review): the date= lambda receives the whole DataFrame, so
        # str(x)[0:10] stringifies the frame itself rather than truncating
        # each datetime to YYYY-MM-DD -- looks like a bug; confirm intent
        # before changing.
        return data.assign(datetime=pd.to_datetime(data['date'])).assign(date=lambda x: str(x)[0:10]) \
            .assign(code=str(code)).assign(order=range(len(data.index))).set_index('datetime', drop=False,
                                                                                   inplace=False)
constant[期货历史成交分笔]
<ast.Tuple object at 0x7da1b1e18be0> assign[=] call[name[get_extensionmarket_ip], parameter[name[ip], name[port]]]
variable[apix] assign[=] call[name[TdxExHq_API], parameter[]]
<ast.Global object at 0x7da1b1e192d0>
variable[extension_market_list] assign[=] <ast.IfExp object at 0x7da1b1e183a0>
variable[code_market] assign[=] call[call[name[extension_market_list].query, parameter[call[constant[code=="{}"].format, parameter[name[code]]]]].iloc][constant[0]]
with call[name[apix].connect, parameter[name[ip], name[port]]] begin[:]
variable[data] assign[=] call[name[pd].DataFrame, parameter[]]
variable[data] assign[=] call[name[pd].concat, parameter[<ast.ListComp object at 0x7da1b1ea0cd0>]]
return[call[call[call[call[call[name[data].assign, parameter[]].assign, parameter[]].assign, parameter[]].assign, parameter[]].set_index, parameter[constant[datetime]]]] | keyword[def] identifier[QA_fetch_get_future_transaction_realtime] ( identifier[code] , identifier[ip] = keyword[None] , identifier[port] = keyword[None] ):
literal[string]
identifier[ip] , identifier[port] = identifier[get_extensionmarket_ip] ( identifier[ip] , identifier[port] )
identifier[apix] = identifier[TdxExHq_API] ()
keyword[global] identifier[extension_market_list]
identifier[extension_market_list] = identifier[QA_fetch_get_extensionmarket_list] (
) keyword[if] identifier[extension_market_list] keyword[is] keyword[None] keyword[else] identifier[extension_market_list]
identifier[code_market] = identifier[extension_market_list] . identifier[query] (
literal[string] . identifier[format] ( identifier[code] )). identifier[iloc] [ literal[int] ]
keyword[with] identifier[apix] . identifier[connect] ( identifier[ip] , identifier[port] ):
identifier[data] = identifier[pd] . identifier[DataFrame] ()
identifier[data] = identifier[pd] . identifier[concat] ([ identifier[apix] . identifier[to_df] ( identifier[apix] . identifier[get_transaction_data] (
identifier[int] ( identifier[code_market] . identifier[market] ), identifier[code] ,( literal[int] - identifier[i] )* literal[int] )) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] )], identifier[axis] = literal[int] )
keyword[return] identifier[data] . identifier[assign] ( identifier[datetime] = identifier[pd] . identifier[to_datetime] ( identifier[data] [ literal[string] ])). identifier[assign] ( identifier[date] = keyword[lambda] identifier[x] : identifier[str] ( identifier[x] )[ literal[int] : literal[int] ]). identifier[assign] ( identifier[code] = identifier[str] ( identifier[code] )). identifier[assign] ( identifier[order] = identifier[range] ( identifier[len] ( identifier[data] . identifier[index] ))). identifier[set_index] ( literal[string] , identifier[drop] = keyword[False] ,
identifier[inplace] = keyword[False] ) | def QA_fetch_get_future_transaction_realtime(code, ip=None, port=None):
"""期货历史成交分笔"""
(ip, port) = get_extensionmarket_ip(ip, port)
apix = TdxExHq_API()
global extension_market_list
extension_market_list = QA_fetch_get_extensionmarket_list() if extension_market_list is None else extension_market_list
code_market = extension_market_list.query('code=="{}"'.format(code)).iloc[0]
with apix.connect(ip, port):
data = pd.DataFrame()
data = pd.concat([apix.to_df(apix.get_transaction_data(int(code_market.market), code, (30 - i) * 1800)) for i in range(31)], axis=0)
return data.assign(datetime=pd.to_datetime(data['date'])).assign(date=lambda x: str(x)[0:10]).assign(code=str(code)).assign(order=range(len(data.index))).set_index('datetime', drop=False, inplace=False) # depends on [control=['with'], data=[]] |
def FreedmanDiaconisBinSize(feature_values):
    """
    Return the Freedman-Diaconis histogram bin size for *feature_values*.

    The bin size in FD-binning is given by size = 2 * IQR(x) * n^(-1/3)
    More Info: https://en.wikipedia.org/wiki/Freedman%E2%80%93Diaconis_rule

    If the BinSize ends up being 0 (in the case that all values are the
    same, so the IQR is 0), return a BinSize of 1.
    """
    q75, q25 = numpy.percentile(feature_values, [75, 25])
    IQR = q75 - q25
    bin_size = 2.0 * IQR * len(feature_values) ** (-1.0/3.0)
    # The docstring promises a non-zero bin size, but the previous code
    # returned 0 whenever the IQR collapsed (all values identical).
    # Implement the documented fallback.
    if bin_size == 0:
        return 1
    return bin_size
constant[
The bin size in FD-binning is given by size = 2 * IQR(x) * n^(-1/3)
More Info: https://en.wikipedia.org/wiki/Freedman%E2%80%93Diaconis_rule
If the BinSize ends up being 0 (in the case that all values are the same),
return a BinSize of 1.
]
<ast.Tuple object at 0x7da1b05c6e30> assign[=] call[name[numpy].percentile, parameter[name[feature_values], list[[<ast.Constant object at 0x7da1b05c7a60>, <ast.Constant object at 0x7da1b05c5570>]]]]
variable[IQR] assign[=] binary_operation[name[q75] - name[q25]]
return[binary_operation[binary_operation[constant[2.0] * name[IQR]] * binary_operation[call[name[len], parameter[name[feature_values]]] ** binary_operation[<ast.UnaryOp object at 0x7da1b0351e40> / constant[3.0]]]]] | keyword[def] identifier[FreedmanDiaconisBinSize] ( identifier[feature_values] ):
literal[string]
identifier[q75] , identifier[q25] = identifier[numpy] . identifier[percentile] ( identifier[feature_values] ,[ literal[int] , literal[int] ])
identifier[IQR] = identifier[q75] - identifier[q25]
keyword[return] literal[int] * identifier[IQR] * identifier[len] ( identifier[feature_values] )**(- literal[int] / literal[int] ) | def FreedmanDiaconisBinSize(feature_values):
"""
The bin size in FD-binning is given by size = 2 * IQR(x) * n^(-1/3)
More Info: https://en.wikipedia.org/wiki/Freedman%E2%80%93Diaconis_rule
If the BinSize ends up being 0 (in the case that all values are the same),
return a BinSize of 1.
"""
(q75, q25) = numpy.percentile(feature_values, [75, 25])
IQR = q75 - q25
return 2.0 * IQR * len(feature_values) ** (-1.0 / 3.0) |
def all(self):
    """
    Return the results represented by this Query as a list.

    .. versionchanged:: 0.10.0
        Returns an iterator that lazily loads
        records instead of fetching thousands
        of records at once.
    """
    # Delegate straight to the RPC model; a missing offset means "start
    # from the first record".
    start = self._offset or 0
    return self.rpc_model.search_read_all(
        self.domain, self._order_by, self.fields,
        context=self.context, offset=start, limit=self._limit)
constant[
Return the results represented by this Query as a list.
.. versionchanged:: 0.10.0
Returns an iterator that lazily loads
records instead of fetching thousands
of records at once.
]
return[call[name[self].rpc_model.search_read_all, parameter[name[self].domain, name[self]._order_by, name[self].fields]]] | keyword[def] identifier[all] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[rpc_model] . identifier[search_read_all] (
identifier[self] . identifier[domain] ,
identifier[self] . identifier[_order_by] ,
identifier[self] . identifier[fields] ,
identifier[context] = identifier[self] . identifier[context] ,
identifier[offset] = identifier[self] . identifier[_offset] keyword[or] literal[int] ,
identifier[limit] = identifier[self] . identifier[_limit] ,
) | def all(self):
"""
Return the results represented by this Query as a list.
.. versionchanged:: 0.10.0
Returns an iterator that lazily loads
records instead of fetching thousands
of records at once.
"""
return self.rpc_model.search_read_all(self.domain, self._order_by, self.fields, context=self.context, offset=self._offset or 0, limit=self._limit) |
def _download(self):
    """Download the page."""
    print("Downloading!")
    url = (
        "http://download.finance.yahoo.com/d/quotes.csv?e=.csv&f=c4l1&s=%s=X"
        % (self._name,))
    deferred = getPage(url)

    def _store_rate(result):
        # The CSV body looks like "<change>,<rate>"; keep the rate.
        print("Got %r back from Yahoo." % (result,))
        fields = result.strip().split(",")
        self._value = float(fields[1])

    deferred.addCallback(_store_rate)
    deferred.addErrback(log.err)
    return deferred
constant[Download the page.]
call[name[print], parameter[constant[Downloading!]]]
def function[parse, parameter[result]]:
call[name[print], parameter[binary_operation[constant[Got %r back from Yahoo.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da207f9b160>]]]]]
variable[values] assign[=] call[call[name[result].strip, parameter[]].split, parameter[constant[,]]]
name[self]._value assign[=] call[name[float], parameter[call[name[values]][constant[1]]]]
variable[d] assign[=] call[name[getPage], parameter[binary_operation[constant[http://download.finance.yahoo.com/d/quotes.csv?e=.csv&f=c4l1&s=%s=X] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da207f98910>]]]]]
call[name[d].addCallback, parameter[name[parse]]]
call[name[d].addErrback, parameter[name[log].err]]
return[name[d]] | keyword[def] identifier[_download] ( identifier[self] ):
literal[string]
identifier[print] ( literal[string] )
keyword[def] identifier[parse] ( identifier[result] ):
identifier[print] ( literal[string] %( identifier[result] ,))
identifier[values] = identifier[result] . identifier[strip] (). identifier[split] ( literal[string] )
identifier[self] . identifier[_value] = identifier[float] ( identifier[values] [ literal[int] ])
identifier[d] = identifier[getPage] (
literal[string]
%( identifier[self] . identifier[_name] ,))
identifier[d] . identifier[addCallback] ( identifier[parse] )
identifier[d] . identifier[addErrback] ( identifier[log] . identifier[err] )
keyword[return] identifier[d] | def _download(self):
"""Download the page."""
print('Downloading!')
def parse(result):
print('Got %r back from Yahoo.' % (result,))
values = result.strip().split(',')
self._value = float(values[1])
d = getPage('http://download.finance.yahoo.com/d/quotes.csv?e=.csv&f=c4l1&s=%s=X' % (self._name,))
d.addCallback(parse)
d.addErrback(log.err)
return d |
def extend(klass, name=None):
    '''A function decorator for extending an existing class.

    Use as a decorator for functions to add to an existing class.

    Args:
        klass: The class to be decorated.
        name: The name the new method is to be given in the klass class.

    Returns:
        A decorator function which accepts a single function as its only
        argument. The decorated function will be added to class klass.

    Raises:
        ValueError: If klass already has an attribute with the same name as
            the extension method.
    '''
    def _attach(f):
        # Delegate the actual binding (and name-collision check) to
        # add_method.
        return add_method(f, klass, name)
    return _attach
constant[A function decorator for extending an existing class.
Use as a decorator for functions to add to an existing class.
Args:
klass: The class to be decorated.
name: The name the new method is to be given in the klass class.
Returns:
A decorator function which accepts a single function as its only
argument. The decorated function will be added to class klass.
Raises:
ValueError: If klass already has an attribute with the same name as the
extension method.
]
def function[decorator, parameter[f]]:
return[call[name[add_method], parameter[name[f], name[klass], name[name]]]]
return[name[decorator]] | keyword[def] identifier[extend] ( identifier[klass] , identifier[name] = keyword[None] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[f] ):
keyword[return] identifier[add_method] ( identifier[f] , identifier[klass] , identifier[name] )
keyword[return] identifier[decorator] | def extend(klass, name=None):
"""A function decorator for extending an existing class.
Use as a decorator for functions to add to an existing class.
Args:
klass: The class to be decorated.
name: The name the new method is to be given in the klass class.
Returns:
A decorator function which accepts a single function as its only
argument. The decorated function will be added to class klass.
Raises:
ValueError: If klass already has an attribute with the same name as the
extension method.
"""
def decorator(f):
return add_method(f, klass, name)
return decorator |
def ensure_parent_directory(path, ensure_parent=True):
    """
    Ensures the parent directory exists.

    :param string path: the path of the file
    :param bool ensure_parent: if ``True``, ensure the parent directory of ``path`` exists;
                               if ``False``, ensure ``path`` exists
    :raises: OSError: if the path cannot be created
    """
    parent_directory = os.path.abspath(path)
    if ensure_parent:
        parent_directory = os.path.dirname(parent_directory)
    if not os.path.exists(parent_directory):
        try:
            os.makedirs(parent_directory)
        except (IOError, OSError):
            # Another process may have created the directory between the
            # exists() check above and makedirs() (TOCTOU race); only treat
            # the failure as fatal when the directory still does not exist.
            if not os.path.isdir(parent_directory):
                raise OSError(u"Directory '%s' cannot be created" % parent_directory)
constant[
Ensures the parent directory exists.
:param string path: the path of the file
:param bool ensure_parent: if ``True``, ensure the parent directory of ``path`` exists;
if ``False``, ensure ``path`` exists
:raises: OSError: if the path cannot be created
]
variable[parent_directory] assign[=] call[name[os].path.abspath, parameter[name[path]]]
if name[ensure_parent] begin[:]
variable[parent_directory] assign[=] call[name[os].path.dirname, parameter[name[parent_directory]]]
if <ast.UnaryOp object at 0x7da1b1883af0> begin[:]
<ast.Try object at 0x7da1b1883e80> | keyword[def] identifier[ensure_parent_directory] ( identifier[path] , identifier[ensure_parent] = keyword[True] ):
literal[string]
identifier[parent_directory] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[path] )
keyword[if] identifier[ensure_parent] :
identifier[parent_directory] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[parent_directory] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[parent_directory] ):
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[parent_directory] )
keyword[except] ( identifier[IOError] , identifier[OSError] ):
keyword[raise] identifier[OSError] ( literal[string] % identifier[parent_directory] ) | def ensure_parent_directory(path, ensure_parent=True):
"""
Ensures the parent directory exists.
:param string path: the path of the file
:param bool ensure_parent: if ``True``, ensure the parent directory of ``path`` exists;
if ``False``, ensure ``path`` exists
:raises: OSError: if the path cannot be created
"""
parent_directory = os.path.abspath(path)
if ensure_parent:
parent_directory = os.path.dirname(parent_directory) # depends on [control=['if'], data=[]]
if not os.path.exists(parent_directory):
try:
os.makedirs(parent_directory) # depends on [control=['try'], data=[]]
except (IOError, OSError):
raise OSError(u"Directory '%s' cannot be created" % parent_directory) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] |
def create_app():
    """Create and configure the Flask application.

    Loads the project configuration, stacks WSGI middleware (proxy fix,
    static-file serving), enables debug aids or production logging depending
    on ``app.debug``/``app.testing``, and registers the database, routes,
    Jinja helpers, error handlers and request hooks.

    :returns: the fully configured Flask instance
    """
    config = load_config()
    app = Flask(__name__)
    app.config.from_object(config)
    # Proxy fix: trust X-Forwarded-* headers when behind a reverse proxy.
    app.wsgi_app = ProxyFix(app.wsgi_app)
    # CSRF protect all form submissions.
    CsrfProtect(app)
    if app.debug or app.testing:
        DebugToolbarExtension(app)
        # Serve source static files directly in debug/testing mode.
        app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
            '/pages': os.path.join(app.config.get('PROJECT_PATH'), 'application/pages')
        })
    else:
        # Log errors to stderr in production mode
        app.logger.addHandler(logging.StreamHandler())
        app.logger.setLevel(logging.ERROR)
        # Enable Sentry error reporting only when a DSN is configured.
        if app.config.get('SENTRY_DSN'):
            from .utils.sentry import sentry
            sentry.init_app(app, dsn=app.config.get('SENTRY_DSN'))
        # Serve the pre-built production assets from the output/ tree.
        app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
            '/static': os.path.join(app.config.get('PROJECT_PATH'), 'output/static'),
            '/pkg': os.path.join(app.config.get('PROJECT_PATH'), 'output/pkg'),
            '/pages': os.path.join(app.config.get('PROJECT_PATH'), 'output/pages')
        })
    # Register application components.
    register_db(app)
    register_routes(app)
    register_jinja(app)
    register_error_handle(app)
    register_hooks(app)
    return app
constant[Create Flask app.]
variable[config] assign[=] call[name[load_config], parameter[]]
variable[app] assign[=] call[name[Flask], parameter[name[__name__]]]
call[name[app].config.from_object, parameter[name[config]]]
name[app].wsgi_app assign[=] call[name[ProxyFix], parameter[name[app].wsgi_app]]
call[name[CsrfProtect], parameter[name[app]]]
if <ast.BoolOp object at 0x7da2049627d0> begin[:]
call[name[DebugToolbarExtension], parameter[name[app]]]
name[app].wsgi_app assign[=] call[name[SharedDataMiddleware], parameter[name[app].wsgi_app, dictionary[[<ast.Constant object at 0x7da204963610>], [<ast.Call object at 0x7da204962d40>]]]]
call[name[register_db], parameter[name[app]]]
call[name[register_routes], parameter[name[app]]]
call[name[register_jinja], parameter[name[app]]]
call[name[register_error_handle], parameter[name[app]]]
call[name[register_hooks], parameter[name[app]]]
return[name[app]] | keyword[def] identifier[create_app] ():
literal[string]
identifier[config] = identifier[load_config] ()
identifier[app] = identifier[Flask] ( identifier[__name__] )
identifier[app] . identifier[config] . identifier[from_object] ( identifier[config] )
identifier[app] . identifier[wsgi_app] = identifier[ProxyFix] ( identifier[app] . identifier[wsgi_app] )
identifier[CsrfProtect] ( identifier[app] )
keyword[if] identifier[app] . identifier[debug] keyword[or] identifier[app] . identifier[testing] :
identifier[DebugToolbarExtension] ( identifier[app] )
identifier[app] . identifier[wsgi_app] = identifier[SharedDataMiddleware] ( identifier[app] . identifier[wsgi_app] ,{
literal[string] : identifier[os] . identifier[path] . identifier[join] ( identifier[app] . identifier[config] . identifier[get] ( literal[string] ), literal[string] )
})
keyword[else] :
identifier[app] . identifier[logger] . identifier[addHandler] ( identifier[logging] . identifier[StreamHandler] ())
identifier[app] . identifier[logger] . identifier[setLevel] ( identifier[logging] . identifier[ERROR] )
keyword[if] identifier[app] . identifier[config] . identifier[get] ( literal[string] ):
keyword[from] . identifier[utils] . identifier[sentry] keyword[import] identifier[sentry]
identifier[sentry] . identifier[init_app] ( identifier[app] , identifier[dsn] = identifier[app] . identifier[config] . identifier[get] ( literal[string] ))
identifier[app] . identifier[wsgi_app] = identifier[SharedDataMiddleware] ( identifier[app] . identifier[wsgi_app] ,{
literal[string] : identifier[os] . identifier[path] . identifier[join] ( identifier[app] . identifier[config] . identifier[get] ( literal[string] ), literal[string] ),
literal[string] : identifier[os] . identifier[path] . identifier[join] ( identifier[app] . identifier[config] . identifier[get] ( literal[string] ), literal[string] ),
literal[string] : identifier[os] . identifier[path] . identifier[join] ( identifier[app] . identifier[config] . identifier[get] ( literal[string] ), literal[string] )
})
identifier[register_db] ( identifier[app] )
identifier[register_routes] ( identifier[app] )
identifier[register_jinja] ( identifier[app] )
identifier[register_error_handle] ( identifier[app] )
identifier[register_hooks] ( identifier[app] )
keyword[return] identifier[app] | def create_app():
"""Create Flask app."""
config = load_config()
app = Flask(__name__)
app.config.from_object(config)
# Proxy fix
app.wsgi_app = ProxyFix(app.wsgi_app)
# CSRF protect
CsrfProtect(app)
if app.debug or app.testing:
DebugToolbarExtension(app)
# Serve static files
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {'/pages': os.path.join(app.config.get('PROJECT_PATH'), 'application/pages')}) # depends on [control=['if'], data=[]]
else:
# Log errors to stderr in production mode
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.ERROR)
# Enable Sentry
if app.config.get('SENTRY_DSN'):
from .utils.sentry import sentry
sentry.init_app(app, dsn=app.config.get('SENTRY_DSN')) # depends on [control=['if'], data=[]]
# Serve static files
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {'/static': os.path.join(app.config.get('PROJECT_PATH'), 'output/static'), '/pkg': os.path.join(app.config.get('PROJECT_PATH'), 'output/pkg'), '/pages': os.path.join(app.config.get('PROJECT_PATH'), 'output/pages')})
# Register components
register_db(app)
register_routes(app)
register_jinja(app)
register_error_handle(app)
register_hooks(app)
return app |
def parallel_concat_lcdir(lcbasedir,
                          objectidlist,
                          aperture='TF1',
                          postfix='.gz',
                          sortby='rjd',
                          normalize=True,
                          outdir=None,
                          recursive=True,
                          nworkers=32,
                          maxworkertasks=1000):
    '''This concatenates all text LCs for the given objectidlist.

    :param lcbasedir: base directory searched for light-curve files
    :param objectidlist: object IDs whose light curves are concatenated
    :param aperture: aperture label forwarded to the worker
    :param postfix: light-curve filename suffix forwarded to the worker
    :param sortby: column used to sort the concatenated light curve
    :param normalize: whether the worker should normalize the light curve
    :param outdir: output directory; defaults to 'pklcs' (created if missing)
    :param recursive: whether the worker searches lcbasedir recursively
    :param nworkers: number of worker processes in the pool
    :param maxworkertasks: tasks per worker before it is recycled
    :return: dict mapping each object ID to its worker's result
    '''
    if not outdir:
        outdir = 'pklcs'
        if not os.path.exists(outdir):
            os.mkdir(outdir)
    # One task tuple per object ID; the dict carries the shared keyword
    # options consumed by parallel_concat_worker.
    tasks = [(lcbasedir, x, {'aperture':aperture,
                             'postfix':postfix,
                             'sortby':sortby,
                             'normalize':normalize,
                             'outdir':outdir,
                             'recursive':recursive}) for x in objectidlist]
    pool = mp.Pool(nworkers, maxtasksperchild=maxworkertasks)
    results = pool.map(parallel_concat_worker, tasks)
    pool.close()
    pool.join()
    # Results come back in task order, so zip pairs each ID with its result.
    return {x:y for (x,y) in zip(objectidlist, results)}
constant[This concatenates all text LCs for the given objectidlist.
]
if <ast.UnaryOp object at 0x7da1b01403d0> begin[:]
variable[outdir] assign[=] constant[pklcs]
if <ast.UnaryOp object at 0x7da1b00b68f0> begin[:]
call[name[os].mkdir, parameter[name[outdir]]]
variable[tasks] assign[=] <ast.ListComp object at 0x7da1b00b5630>
variable[pool] assign[=] call[name[mp].Pool, parameter[name[nworkers]]]
variable[results] assign[=] call[name[pool].map, parameter[name[parallel_concat_worker], name[tasks]]]
call[name[pool].close, parameter[]]
call[name[pool].join, parameter[]]
return[<ast.DictComp object at 0x7da2044c2410>] | keyword[def] identifier[parallel_concat_lcdir] ( identifier[lcbasedir] ,
identifier[objectidlist] ,
identifier[aperture] = literal[string] ,
identifier[postfix] = literal[string] ,
identifier[sortby] = literal[string] ,
identifier[normalize] = keyword[True] ,
identifier[outdir] = keyword[None] ,
identifier[recursive] = keyword[True] ,
identifier[nworkers] = literal[int] ,
identifier[maxworkertasks] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[outdir] :
identifier[outdir] = literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[outdir] ):
identifier[os] . identifier[mkdir] ( identifier[outdir] )
identifier[tasks] =[( identifier[lcbasedir] , identifier[x] ,{ literal[string] : identifier[aperture] ,
literal[string] : identifier[postfix] ,
literal[string] : identifier[sortby] ,
literal[string] : identifier[normalize] ,
literal[string] : identifier[outdir] ,
literal[string] : identifier[recursive] }) keyword[for] identifier[x] keyword[in] identifier[objectidlist] ]
identifier[pool] = identifier[mp] . identifier[Pool] ( identifier[nworkers] , identifier[maxtasksperchild] = identifier[maxworkertasks] )
identifier[results] = identifier[pool] . identifier[map] ( identifier[parallel_concat_worker] , identifier[tasks] )
identifier[pool] . identifier[close] ()
identifier[pool] . identifier[join] ()
keyword[return] { identifier[x] : identifier[y] keyword[for] ( identifier[x] , identifier[y] ) keyword[in] identifier[zip] ( identifier[objectidlist] , identifier[results] )} | def parallel_concat_lcdir(lcbasedir, objectidlist, aperture='TF1', postfix='.gz', sortby='rjd', normalize=True, outdir=None, recursive=True, nworkers=32, maxworkertasks=1000):
"""This concatenates all text LCs for the given objectidlist.
"""
if not outdir:
outdir = 'pklcs' # depends on [control=['if'], data=[]]
if not os.path.exists(outdir):
os.mkdir(outdir) # depends on [control=['if'], data=[]]
tasks = [(lcbasedir, x, {'aperture': aperture, 'postfix': postfix, 'sortby': sortby, 'normalize': normalize, 'outdir': outdir, 'recursive': recursive}) for x in objectidlist]
pool = mp.Pool(nworkers, maxtasksperchild=maxworkertasks)
results = pool.map(parallel_concat_worker, tasks)
pool.close()
pool.join()
return {x: y for (x, y) in zip(objectidlist, results)} |
def as_user(self, user, password):
    """
    A context-manager for ``get_user_client``. Allows the execution
    of commands as a different user with ease.

    Example:

        >>> c.cmd.vol_list()
        >>> with c.as_user("user", "password"):
        ...     c.cmd.vol_list()
    """
    # Temporarily swap in the given credentials; self.options restores the
    # previous settings when the with-block exits.
    credentials = {"user": user, "password": password}
    with self.options(**credentials):
        yield self
constant[
A context-manager for ``get_user_client``. Allows the execution
of commands as a different user with ease.
Example:
>>> c.cmd.vol_list()
>>> with c.as_user("user", "password"):
... c.cmd.vol_list()
]
with call[name[self].options, parameter[]] begin[:]
<ast.Yield object at 0x7da1b1905240> | keyword[def] identifier[as_user] ( identifier[self] , identifier[user] , identifier[password] ):
literal[string]
keyword[with] identifier[self] . identifier[options] ( identifier[user] = identifier[user] , identifier[password] = identifier[password] ):
keyword[yield] identifier[self] | def as_user(self, user, password):
"""
A context-manager for ``get_user_client``. Allows the execution
of commands as a different user with ease.
Example:
>>> c.cmd.vol_list()
>>> with c.as_user("user", "password"):
... c.cmd.vol_list()
"""
with self.options(user=user, password=password):
yield self # depends on [control=['with'], data=[]] |
def query_context(self, regions, file_factory=PythonFile):
    """
    Return which set of test contexts intersect a set of code regions.

    Parameters
    ----------
    regions: A sequence of Intervals

    file_factory: Callable (optional, default PythonFile)
        A callable that takes a filename and
        returns a PythonFile object.

    Returns
    -------
    A QueryResult
    """
    matched = set()
    for code_region in regions:
        try:
            python_file = file_factory(code_region.filename)
        except InvalidPythonFile:
            # Not a parseable Python file: nothing can intersect it.
            continue
        # The region and/or the coverage report may record paths relative
        # to this directory; try both conventions so either one matches.
        candidate_paths = {
            os.path.abspath(code_region.filename),
            os.path.relpath(code_region.filename),
        }
        for ctx, hit_map in six.iteritems(self.data):
            if ctx in matched:
                continue
            for candidate in candidate_paths:
                if code_region.intersects(python_file, hit_map.get(candidate, [])):
                    matched.add(ctx)
    return QueryResult(matched)
constant[
Return which set of test contexts intersect a set of code regions.
Parameters
----------
regions: A sequence of Intervals
file_factory: Callable (optional, default PythonFile)
A callable that takes a filename and
returns a PythonFile object.
Returns
-------
A QueryResult
]
variable[result] assign[=] call[name[set], parameter[]]
for taget[name[region]] in starred[name[regions]] begin[:]
<ast.Try object at 0x7da2054a5900>
variable[paths] assign[=] <ast.Set object at 0x7da2054a4b80>
for taget[tuple[[<ast.Name object at 0x7da2054a69e0>, <ast.Name object at 0x7da2054a5870>]]] in starred[call[name[six].iteritems, parameter[name[self].data]]] begin[:]
if compare[name[test_context] in name[result]] begin[:]
continue
for taget[name[path]] in starred[name[paths]] begin[:]
if call[name[region].intersects, parameter[name[pf], call[name[hits].get, parameter[name[path], list[[]]]]]] begin[:]
call[name[result].add, parameter[name[test_context]]]
return[call[name[QueryResult], parameter[name[result]]]] | keyword[def] identifier[query_context] ( identifier[self] , identifier[regions] , identifier[file_factory] = identifier[PythonFile] ):
literal[string]
identifier[result] = identifier[set] ()
keyword[for] identifier[region] keyword[in] identifier[regions] :
keyword[try] :
identifier[pf] = identifier[file_factory] ( identifier[region] . identifier[filename] )
keyword[except] identifier[InvalidPythonFile] :
keyword[continue]
identifier[paths] ={
identifier[os] . identifier[path] . identifier[abspath] ( identifier[region] . identifier[filename] ),
identifier[os] . identifier[path] . identifier[relpath] ( identifier[region] . identifier[filename] )
}
keyword[for] identifier[test_context] , identifier[hits] keyword[in] identifier[six] . identifier[iteritems] ( identifier[self] . identifier[data] ):
keyword[if] identifier[test_context] keyword[in] identifier[result] :
keyword[continue]
keyword[for] identifier[path] keyword[in] identifier[paths] :
keyword[if] identifier[region] . identifier[intersects] ( identifier[pf] , identifier[hits] . identifier[get] ( identifier[path] ,[])):
identifier[result] . identifier[add] ( identifier[test_context] )
keyword[return] identifier[QueryResult] ( identifier[result] ) | def query_context(self, regions, file_factory=PythonFile):
"""
Return which set of test contexts intersect a set of code regions.
Parameters
----------
regions: A sequence of Intervals
file_factory: Callable (optional, default PythonFile)
A callable that takes a filename and
returns a PythonFile object.
Returns
-------
A QueryResult
"""
result = set()
for region in regions:
try:
pf = file_factory(region.filename) # depends on [control=['try'], data=[]]
except InvalidPythonFile:
continue # depends on [control=['except'], data=[]]
# region and/or coverage report may use paths
# relative to this directory. Ensure we find a match
# if they use different conventions.
paths = {os.path.abspath(region.filename), os.path.relpath(region.filename)}
for (test_context, hits) in six.iteritems(self.data):
if test_context in result:
continue # depends on [control=['if'], data=[]]
for path in paths:
if region.intersects(pf, hits.get(path, [])):
result.add(test_context) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['path']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['region']]
return QueryResult(result) |
def post_card(message,
              hook_url=None,
              title=None,
              theme_color=None):
    '''
    Send a message to an MS Teams channel.

    :param message: The message to send to the MS Teams channel.
    :param hook_url: The Teams webhook URL, if not specified in the configuration.
    :param title: Optional title for the posted card
    :param theme_color: Optional hex color highlight for the posted card
    :return: True on success, or a dict with ``res``/``message`` on failure.

    CLI Example:

    .. code-block:: bash

        salt '*' msteams.post_card message="Build is done"
    '''
    if not hook_url:
        hook_url = _get_hook_url()

    if not message:
        # NOTE(review): this only logs and then falls through -- the card is
        # still posted with message=None; presumably an early error return
        # was intended. Confirm before changing, since sibling execution
        # modules share this pattern.
        log.error('message is a required option.')

    payload = {
        "text": message,
        "title": title,
        "themeColor": theme_color
    }

    result = salt.utils.http.query(hook_url,
                                   method='POST',
                                   data=salt.utils.json.dumps(payload),
                                   status=True)
    # Anything up to 201 counts as accepted; otherwise surface the response
    # body (or the bare status when no body came back).
    if result['status'] <= 201:
        return True
    else:
        return {
            'res': False,
            'message': result.get('body', result['status'])
        }
constant[
Send a message to an MS Teams channel.
:param message: The message to send to the MS Teams channel.
:param hook_url: The Teams webhook URL, if not specified in the configuration.
:param title: Optional title for the posted card
:param theme_color: Optional hex color highlight for the posted card
:return: Boolean if message was sent successfully.
CLI Example:
.. code-block:: bash
salt '*' msteams.post_card message="Build is done"
]
if <ast.UnaryOp object at 0x7da1b21a09d0> begin[:]
variable[hook_url] assign[=] call[name[_get_hook_url], parameter[]]
if <ast.UnaryOp object at 0x7da1b21a22c0> begin[:]
call[name[log].error, parameter[constant[message is a required option.]]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b21a3be0>, <ast.Constant object at 0x7da1b21a37c0>, <ast.Constant object at 0x7da1b21a3ca0>], [<ast.Name object at 0x7da1b21a3a30>, <ast.Name object at 0x7da1b21a38b0>, <ast.Name object at 0x7da1b21a1180>]]
variable[result] assign[=] call[name[salt].utils.http.query, parameter[name[hook_url]]]
if compare[call[name[result]][constant[status]] less_or_equal[<=] constant[201]] begin[:]
return[constant[True]] | keyword[def] identifier[post_card] ( identifier[message] ,
identifier[hook_url] = keyword[None] ,
identifier[title] = keyword[None] ,
identifier[theme_color] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[hook_url] :
identifier[hook_url] = identifier[_get_hook_url] ()
keyword[if] keyword[not] identifier[message] :
identifier[log] . identifier[error] ( literal[string] )
identifier[payload] ={
literal[string] : identifier[message] ,
literal[string] : identifier[title] ,
literal[string] : identifier[theme_color]
}
identifier[result] = identifier[salt] . identifier[utils] . identifier[http] . identifier[query] ( identifier[hook_url] ,
identifier[method] = literal[string] ,
identifier[data] = identifier[salt] . identifier[utils] . identifier[json] . identifier[dumps] ( identifier[payload] ),
identifier[status] = keyword[True] )
keyword[if] identifier[result] [ literal[string] ]<= literal[int] :
keyword[return] keyword[True]
keyword[else] :
keyword[return] {
literal[string] : keyword[False] ,
literal[string] : identifier[result] . identifier[get] ( literal[string] , identifier[result] [ literal[string] ])
} | def post_card(message, hook_url=None, title=None, theme_color=None):
"""
Send a message to an MS Teams channel.
:param message: The message to send to the MS Teams channel.
:param hook_url: The Teams webhook URL, if not specified in the configuration.
:param title: Optional title for the posted card
:param theme_color: Optional hex color highlight for the posted card
:return: Boolean if message was sent successfully.
CLI Example:
.. code-block:: bash
salt '*' msteams.post_card message="Build is done"
"""
if not hook_url:
hook_url = _get_hook_url() # depends on [control=['if'], data=[]]
if not message:
log.error('message is a required option.') # depends on [control=['if'], data=[]]
payload = {'text': message, 'title': title, 'themeColor': theme_color}
result = salt.utils.http.query(hook_url, method='POST', data=salt.utils.json.dumps(payload), status=True)
if result['status'] <= 201:
return True # depends on [control=['if'], data=[]]
else:
return {'res': False, 'message': result.get('body', result['status'])} |
def _load_result(response, ret):
'''
format the results of listing functions
'''
#were we able to connect?
if response['code'] is None:
ret['comment'] = response['content']
#forbidden?
elif response['code'] == 401:
ret['comment'] = '401 Forbidden: Authentication required!'
#Not found?
elif response['code'] == 404:
ret['comment'] = response['content']['message']
#200?
elif response['code'] == 200:
ret['result'] = True
ret['comment'] = 'Listing Current Configuration Only. ' \
'Not action or changes occurred during the execution of this state.'
ret['changes'] = response['content']
#something bad
else:
ret['comment'] = response['content']['message']
return ret | def function[_load_result, parameter[response, ret]]:
constant[
format the results of listing functions
]
if compare[call[name[response]][constant[code]] is constant[None]] begin[:]
call[name[ret]][constant[comment]] assign[=] call[name[response]][constant[content]]
return[name[ret]] | keyword[def] identifier[_load_result] ( identifier[response] , identifier[ret] ):
literal[string]
keyword[if] identifier[response] [ literal[string] ] keyword[is] keyword[None] :
identifier[ret] [ literal[string] ]= identifier[response] [ literal[string] ]
keyword[elif] identifier[response] [ literal[string] ]== literal[int] :
identifier[ret] [ literal[string] ]= literal[string]
keyword[elif] identifier[response] [ literal[string] ]== literal[int] :
identifier[ret] [ literal[string] ]= identifier[response] [ literal[string] ][ literal[string] ]
keyword[elif] identifier[response] [ literal[string] ]== literal[int] :
identifier[ret] [ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ]= literal[string] literal[string]
identifier[ret] [ literal[string] ]= identifier[response] [ literal[string] ]
keyword[else] :
identifier[ret] [ literal[string] ]= identifier[response] [ literal[string] ][ literal[string] ]
keyword[return] identifier[ret] | def _load_result(response, ret):
"""
format the results of listing functions
"""
#were we able to connect?
if response['code'] is None:
ret['comment'] = response['content'] # depends on [control=['if'], data=[]]
#forbidden?
elif response['code'] == 401:
ret['comment'] = '401 Forbidden: Authentication required!' # depends on [control=['if'], data=[]]
#Not found?
elif response['code'] == 404:
ret['comment'] = response['content']['message'] # depends on [control=['if'], data=[]]
#200?
elif response['code'] == 200:
ret['result'] = True
ret['comment'] = 'Listing Current Configuration Only. Not action or changes occurred during the execution of this state.'
ret['changes'] = response['content'] # depends on [control=['if'], data=[]]
else:
#something bad
ret['comment'] = response['content']['message']
return ret |
def register_endpoints(self, backend_names):
"""
See super class satosa.frontends.base.FrontendModule
:type backend_names: list[str]
:rtype: list[(str, ((satosa.context.Context, Any) -> satosa.response.Response, Any))]
:raise ValueError: if more than one backend is configured
"""
url_map = [("^{}".format(self.name), self.ping_endpoint)]
return url_map | def function[register_endpoints, parameter[self, backend_names]]:
constant[
See super class satosa.frontends.base.FrontendModule
:type backend_names: list[str]
:rtype: list[(str, ((satosa.context.Context, Any) -> satosa.response.Response, Any))]
:raise ValueError: if more than one backend is configured
]
variable[url_map] assign[=] list[[<ast.Tuple object at 0x7da1b155dba0>]]
return[name[url_map]] | keyword[def] identifier[register_endpoints] ( identifier[self] , identifier[backend_names] ):
literal[string]
identifier[url_map] =[( literal[string] . identifier[format] ( identifier[self] . identifier[name] ), identifier[self] . identifier[ping_endpoint] )]
keyword[return] identifier[url_map] | def register_endpoints(self, backend_names):
"""
See super class satosa.frontends.base.FrontendModule
:type backend_names: list[str]
:rtype: list[(str, ((satosa.context.Context, Any) -> satosa.response.Response, Any))]
:raise ValueError: if more than one backend is configured
"""
url_map = [('^{}'.format(self.name), self.ping_endpoint)]
return url_map |
def parse_sphinx_searchindex(searchindex):
"""Parse a Sphinx search index
Parameters
----------
searchindex : str
The Sphinx search index (contents of searchindex.js)
Returns
-------
filenames : list of str
The file names parsed from the search index.
objects : dict
The objects parsed from the search index.
"""
# Make sure searchindex uses UTF-8 encoding
if hasattr(searchindex, 'decode'):
searchindex = searchindex.decode('UTF-8')
# parse objects
query = 'objects:'
pos = searchindex.find(query)
if pos < 0:
raise ValueError('"objects:" not found in search index')
sel = _select_block(searchindex[pos:], '{', '}')
objects = _parse_dict_recursive(sel)
# parse filenames
query = 'filenames:'
pos = searchindex.find(query)
if pos < 0:
raise ValueError('"filenames:" not found in search index')
filenames = searchindex[pos + len(query) + 1:]
filenames = filenames[:filenames.find(']')]
filenames = [f.strip('"') for f in filenames.split(',')]
return filenames, objects | def function[parse_sphinx_searchindex, parameter[searchindex]]:
constant[Parse a Sphinx search index
Parameters
----------
searchindex : str
The Sphinx search index (contents of searchindex.js)
Returns
-------
filenames : list of str
The file names parsed from the search index.
objects : dict
The objects parsed from the search index.
]
if call[name[hasattr], parameter[name[searchindex], constant[decode]]] begin[:]
variable[searchindex] assign[=] call[name[searchindex].decode, parameter[constant[UTF-8]]]
variable[query] assign[=] constant[objects:]
variable[pos] assign[=] call[name[searchindex].find, parameter[name[query]]]
if compare[name[pos] less[<] constant[0]] begin[:]
<ast.Raise object at 0x7da18c4cc6a0>
variable[sel] assign[=] call[name[_select_block], parameter[call[name[searchindex]][<ast.Slice object at 0x7da18c4ce5c0>], constant[{], constant[}]]]
variable[objects] assign[=] call[name[_parse_dict_recursive], parameter[name[sel]]]
variable[query] assign[=] constant[filenames:]
variable[pos] assign[=] call[name[searchindex].find, parameter[name[query]]]
if compare[name[pos] less[<] constant[0]] begin[:]
<ast.Raise object at 0x7da18c4cf460>
variable[filenames] assign[=] call[name[searchindex]][<ast.Slice object at 0x7da18c4cfc70>]
variable[filenames] assign[=] call[name[filenames]][<ast.Slice object at 0x7da18c4cee00>]
variable[filenames] assign[=] <ast.ListComp object at 0x7da18c4cc160>
return[tuple[[<ast.Name object at 0x7da18c4cf970>, <ast.Name object at 0x7da18c4cd600>]]] | keyword[def] identifier[parse_sphinx_searchindex] ( identifier[searchindex] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[searchindex] , literal[string] ):
identifier[searchindex] = identifier[searchindex] . identifier[decode] ( literal[string] )
identifier[query] = literal[string]
identifier[pos] = identifier[searchindex] . identifier[find] ( identifier[query] )
keyword[if] identifier[pos] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[sel] = identifier[_select_block] ( identifier[searchindex] [ identifier[pos] :], literal[string] , literal[string] )
identifier[objects] = identifier[_parse_dict_recursive] ( identifier[sel] )
identifier[query] = literal[string]
identifier[pos] = identifier[searchindex] . identifier[find] ( identifier[query] )
keyword[if] identifier[pos] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[filenames] = identifier[searchindex] [ identifier[pos] + identifier[len] ( identifier[query] )+ literal[int] :]
identifier[filenames] = identifier[filenames] [: identifier[filenames] . identifier[find] ( literal[string] )]
identifier[filenames] =[ identifier[f] . identifier[strip] ( literal[string] ) keyword[for] identifier[f] keyword[in] identifier[filenames] . identifier[split] ( literal[string] )]
keyword[return] identifier[filenames] , identifier[objects] | def parse_sphinx_searchindex(searchindex):
"""Parse a Sphinx search index
Parameters
----------
searchindex : str
The Sphinx search index (contents of searchindex.js)
Returns
-------
filenames : list of str
The file names parsed from the search index.
objects : dict
The objects parsed from the search index.
"""
# Make sure searchindex uses UTF-8 encoding
if hasattr(searchindex, 'decode'):
searchindex = searchindex.decode('UTF-8') # depends on [control=['if'], data=[]]
# parse objects
query = 'objects:'
pos = searchindex.find(query)
if pos < 0:
raise ValueError('"objects:" not found in search index') # depends on [control=['if'], data=[]]
sel = _select_block(searchindex[pos:], '{', '}')
objects = _parse_dict_recursive(sel)
# parse filenames
query = 'filenames:'
pos = searchindex.find(query)
if pos < 0:
raise ValueError('"filenames:" not found in search index') # depends on [control=['if'], data=[]]
filenames = searchindex[pos + len(query) + 1:]
filenames = filenames[:filenames.find(']')]
filenames = [f.strip('"') for f in filenames.split(',')]
return (filenames, objects) |
def clean_parameters(self, params):
'''Only keep known parameters'''
return {k: v for k, v in params.items() if k in self.adapter.facets} | def function[clean_parameters, parameter[self, params]]:
constant[Only keep known parameters]
return[<ast.DictComp object at 0x7da204564e20>] | keyword[def] identifier[clean_parameters] ( identifier[self] , identifier[params] ):
literal[string]
keyword[return] { identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[params] . identifier[items] () keyword[if] identifier[k] keyword[in] identifier[self] . identifier[adapter] . identifier[facets] } | def clean_parameters(self, params):
"""Only keep known parameters"""
return {k: v for (k, v) in params.items() if k in self.adapter.facets} |
def vals(self, var_type):
"""
Create a dictionary with name/value pairs listing the
variables of a particular type that have a value.
"""
return {x: y for x, y in self.items() if y.has_value_of_type(var_type)} | def function[vals, parameter[self, var_type]]:
constant[
Create a dictionary with name/value pairs listing the
variables of a particular type that have a value.
]
return[<ast.DictComp object at 0x7da207f9b910>] | keyword[def] identifier[vals] ( identifier[self] , identifier[var_type] ):
literal[string]
keyword[return] { identifier[x] : identifier[y] keyword[for] identifier[x] , identifier[y] keyword[in] identifier[self] . identifier[items] () keyword[if] identifier[y] . identifier[has_value_of_type] ( identifier[var_type] )} | def vals(self, var_type):
"""
Create a dictionary with name/value pairs listing the
variables of a particular type that have a value.
"""
return {x: y for (x, y) in self.items() if y.has_value_of_type(var_type)} |
def generate_license(args):
'''
Creates a LICENSE or LICENSE.txt file in the current directory. Reads from
the 'assets' folder and looks for placeholders enclosed in curly braces.
Arguments:
- (tuple) Name, email, license, project, ext, year
'''
with open(cwd + licenses_loc + args[2]) as f:
license = f.read()
license = license.format(name=args[0],
email=args[1],
license=args[2],
project=args[3],
year=args[5])
with open('LICENSE' + args[4], 'w') as f:
f.write(license)
print('licenser: license file added to current directory') | def function[generate_license, parameter[args]]:
constant[
Creates a LICENSE or LICENSE.txt file in the current directory. Reads from
the 'assets' folder and looks for placeholders enclosed in curly braces.
Arguments:
- (tuple) Name, email, license, project, ext, year
]
with call[name[open], parameter[binary_operation[binary_operation[name[cwd] + name[licenses_loc]] + call[name[args]][constant[2]]]]] begin[:]
variable[license] assign[=] call[name[f].read, parameter[]]
variable[license] assign[=] call[name[license].format, parameter[]]
with call[name[open], parameter[binary_operation[constant[LICENSE] + call[name[args]][constant[4]]], constant[w]]] begin[:]
call[name[f].write, parameter[name[license]]]
call[name[print], parameter[constant[licenser: license file added to current directory]]] | keyword[def] identifier[generate_license] ( identifier[args] ):
literal[string]
keyword[with] identifier[open] ( identifier[cwd] + identifier[licenses_loc] + identifier[args] [ literal[int] ]) keyword[as] identifier[f] :
identifier[license] = identifier[f] . identifier[read] ()
identifier[license] = identifier[license] . identifier[format] ( identifier[name] = identifier[args] [ literal[int] ],
identifier[email] = identifier[args] [ literal[int] ],
identifier[license] = identifier[args] [ literal[int] ],
identifier[project] = identifier[args] [ literal[int] ],
identifier[year] = identifier[args] [ literal[int] ])
keyword[with] identifier[open] ( literal[string] + identifier[args] [ literal[int] ], literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[license] )
identifier[print] ( literal[string] ) | def generate_license(args):
"""
Creates a LICENSE or LICENSE.txt file in the current directory. Reads from
the 'assets' folder and looks for placeholders enclosed in curly braces.
Arguments:
- (tuple) Name, email, license, project, ext, year
"""
with open(cwd + licenses_loc + args[2]) as f:
license = f.read() # depends on [control=['with'], data=['f']]
license = license.format(name=args[0], email=args[1], license=args[2], project=args[3], year=args[5])
with open('LICENSE' + args[4], 'w') as f:
f.write(license)
print('licenser: license file added to current directory') # depends on [control=['with'], data=['f']] |
def change_keyboard_control(self, onerror = None, **keys):
"""Change the parameters provided as keyword arguments:
key_click_percent
The volume of key clicks between 0 (off) and 100 (load).
-1 will restore default setting.
bell_percent
The base volume of the bell, coded as above.
bell_pitch
The pitch of the bell in Hz, -1 restores the default.
bell_duration
The duration of the bell in milliseconds, -1 restores
the default.
led
led_mode
led_mode should be X.LedModeOff or X.LedModeOn. If led is
provided, it should be a 32-bit mask listing the LEDs that
should change. If led is not provided, all LEDs are changed.
key
auto_repeat_mode
auto_repeat_mode should be one of X.AutoRepeatModeOff,
X.AutoRepeatModeOn, or X.AutoRepeatModeDefault. If key is
provided, that key will be modified, otherwise the global
state for the entire keyboard will be modified."""
request.ChangeKeyboardControl(display = self.display,
onerror = onerror,
attrs = keys) | def function[change_keyboard_control, parameter[self, onerror]]:
constant[Change the parameters provided as keyword arguments:
key_click_percent
The volume of key clicks between 0 (off) and 100 (load).
-1 will restore default setting.
bell_percent
The base volume of the bell, coded as above.
bell_pitch
The pitch of the bell in Hz, -1 restores the default.
bell_duration
The duration of the bell in milliseconds, -1 restores
the default.
led
led_mode
led_mode should be X.LedModeOff or X.LedModeOn. If led is
provided, it should be a 32-bit mask listing the LEDs that
should change. If led is not provided, all LEDs are changed.
key
auto_repeat_mode
auto_repeat_mode should be one of X.AutoRepeatModeOff,
X.AutoRepeatModeOn, or X.AutoRepeatModeDefault. If key is
provided, that key will be modified, otherwise the global
state for the entire keyboard will be modified.]
call[name[request].ChangeKeyboardControl, parameter[]] | keyword[def] identifier[change_keyboard_control] ( identifier[self] , identifier[onerror] = keyword[None] ,** identifier[keys] ):
literal[string]
identifier[request] . identifier[ChangeKeyboardControl] ( identifier[display] = identifier[self] . identifier[display] ,
identifier[onerror] = identifier[onerror] ,
identifier[attrs] = identifier[keys] ) | def change_keyboard_control(self, onerror=None, **keys):
"""Change the parameters provided as keyword arguments:
key_click_percent
The volume of key clicks between 0 (off) and 100 (load).
-1 will restore default setting.
bell_percent
The base volume of the bell, coded as above.
bell_pitch
The pitch of the bell in Hz, -1 restores the default.
bell_duration
The duration of the bell in milliseconds, -1 restores
the default.
led
led_mode
led_mode should be X.LedModeOff or X.LedModeOn. If led is
provided, it should be a 32-bit mask listing the LEDs that
should change. If led is not provided, all LEDs are changed.
key
auto_repeat_mode
auto_repeat_mode should be one of X.AutoRepeatModeOff,
X.AutoRepeatModeOn, or X.AutoRepeatModeDefault. If key is
provided, that key will be modified, otherwise the global
state for the entire keyboard will be modified."""
request.ChangeKeyboardControl(display=self.display, onerror=onerror, attrs=keys) |
def dipole_moment(r_array, charge_array):
'''Return the dipole moment of a neutral system.
'''
return np.sum(r_array * charge_array[:, np.newaxis], axis=0) | def function[dipole_moment, parameter[r_array, charge_array]]:
constant[Return the dipole moment of a neutral system.
]
return[call[name[np].sum, parameter[binary_operation[name[r_array] * call[name[charge_array]][tuple[[<ast.Slice object at 0x7da207f03520>, <ast.Attribute object at 0x7da207f02c80>]]]]]]] | keyword[def] identifier[dipole_moment] ( identifier[r_array] , identifier[charge_array] ):
literal[string]
keyword[return] identifier[np] . identifier[sum] ( identifier[r_array] * identifier[charge_array] [:, identifier[np] . identifier[newaxis] ], identifier[axis] = literal[int] ) | def dipole_moment(r_array, charge_array):
"""Return the dipole moment of a neutral system.
"""
return np.sum(r_array * charge_array[:, np.newaxis], axis=0) |
def store(self, key, value):
"""Store the key, value pair in our redis server"""
# Prepend tweepy to our key,
# this makes it easier to identify tweepy keys in our redis server
key = self.pre_identifier + key
# Get a pipe (to execute several redis commands in one step)
pipe = self.client.pipeline()
# Set our values in a redis hash (similar to python dict)
pipe.set(key, pickle.dumps((time.time(), value)))
# Set the expiration
pipe.expire(key, self.timeout)
# Add the key to a set containing all the keys
pipe.sadd(self.keys_container, key)
# Execute the instructions in the redis server
pipe.execute() | def function[store, parameter[self, key, value]]:
constant[Store the key, value pair in our redis server]
variable[key] assign[=] binary_operation[name[self].pre_identifier + name[key]]
variable[pipe] assign[=] call[name[self].client.pipeline, parameter[]]
call[name[pipe].set, parameter[name[key], call[name[pickle].dumps, parameter[tuple[[<ast.Call object at 0x7da18c4cc910>, <ast.Name object at 0x7da18c4cfb80>]]]]]]
call[name[pipe].expire, parameter[name[key], name[self].timeout]]
call[name[pipe].sadd, parameter[name[self].keys_container, name[key]]]
call[name[pipe].execute, parameter[]] | keyword[def] identifier[store] ( identifier[self] , identifier[key] , identifier[value] ):
literal[string]
identifier[key] = identifier[self] . identifier[pre_identifier] + identifier[key]
identifier[pipe] = identifier[self] . identifier[client] . identifier[pipeline] ()
identifier[pipe] . identifier[set] ( identifier[key] , identifier[pickle] . identifier[dumps] (( identifier[time] . identifier[time] (), identifier[value] )))
identifier[pipe] . identifier[expire] ( identifier[key] , identifier[self] . identifier[timeout] )
identifier[pipe] . identifier[sadd] ( identifier[self] . identifier[keys_container] , identifier[key] )
identifier[pipe] . identifier[execute] () | def store(self, key, value):
"""Store the key, value pair in our redis server"""
# Prepend tweepy to our key,
# this makes it easier to identify tweepy keys in our redis server
key = self.pre_identifier + key
# Get a pipe (to execute several redis commands in one step)
pipe = self.client.pipeline()
# Set our values in a redis hash (similar to python dict)
pipe.set(key, pickle.dumps((time.time(), value)))
# Set the expiration
pipe.expire(key, self.timeout)
# Add the key to a set containing all the keys
pipe.sadd(self.keys_container, key)
# Execute the instructions in the redis server
pipe.execute() |
def package(
self, name, version, extras=None
): # type: (...) -> poetry.packages.Package
"""
Retrieve the release information.
This is a heavy task which takes time.
We have to download a package to get the dependencies.
We also need to download every file matching this release
to get the various hashes.
Note that, this will be cached so the subsequent operations
should be much faster.
"""
try:
index = self._packages.index(
poetry.packages.Package(name, version, version)
)
return self._packages[index]
except ValueError:
if extras is None:
extras = []
release_info = self.get_release_info(name, version)
package = poetry.packages.Package(name, version, version)
if release_info["requires_python"]:
package.python_versions = release_info["requires_python"]
package.source_type = "legacy"
package.source_url = self._url
package.source_reference = self.name
requires_dist = release_info["requires_dist"] or []
for req in requires_dist:
try:
dependency = dependency_from_pep_508(req)
except InvalidMarker:
# Invalid marker
# We strip the markers hoping for the best
req = req.split(";")[0]
dependency = dependency_from_pep_508(req)
except ValueError:
# Likely unable to parse constraint so we skip it
self._log(
"Invalid constraint ({}) found in {}-{} dependencies, "
"skipping".format(req, package.name, package.version),
level="debug",
)
continue
if dependency.in_extras:
for extra in dependency.in_extras:
if extra not in package.extras:
package.extras[extra] = []
package.extras[extra].append(dependency)
if not dependency.is_optional():
package.requires.append(dependency)
# Adding description
package.description = release_info.get("summary", "")
# Adding hashes information
package.hashes = release_info["digests"]
# Activate extra dependencies
for extra in extras:
if extra in package.extras:
for dep in package.extras[extra]:
dep.activate()
package.requires += package.extras[extra]
self._packages.append(package)
return package | def function[package, parameter[self, name, version, extras]]:
constant[
Retrieve the release information.
This is a heavy task which takes time.
We have to download a package to get the dependencies.
We also need to download every file matching this release
to get the various hashes.
Note that, this will be cached so the subsequent operations
should be much faster.
]
<ast.Try object at 0x7da20e9b3010> | keyword[def] identifier[package] (
identifier[self] , identifier[name] , identifier[version] , identifier[extras] = keyword[None]
):
literal[string]
keyword[try] :
identifier[index] = identifier[self] . identifier[_packages] . identifier[index] (
identifier[poetry] . identifier[packages] . identifier[Package] ( identifier[name] , identifier[version] , identifier[version] )
)
keyword[return] identifier[self] . identifier[_packages] [ identifier[index] ]
keyword[except] identifier[ValueError] :
keyword[if] identifier[extras] keyword[is] keyword[None] :
identifier[extras] =[]
identifier[release_info] = identifier[self] . identifier[get_release_info] ( identifier[name] , identifier[version] )
identifier[package] = identifier[poetry] . identifier[packages] . identifier[Package] ( identifier[name] , identifier[version] , identifier[version] )
keyword[if] identifier[release_info] [ literal[string] ]:
identifier[package] . identifier[python_versions] = identifier[release_info] [ literal[string] ]
identifier[package] . identifier[source_type] = literal[string]
identifier[package] . identifier[source_url] = identifier[self] . identifier[_url]
identifier[package] . identifier[source_reference] = identifier[self] . identifier[name]
identifier[requires_dist] = identifier[release_info] [ literal[string] ] keyword[or] []
keyword[for] identifier[req] keyword[in] identifier[requires_dist] :
keyword[try] :
identifier[dependency] = identifier[dependency_from_pep_508] ( identifier[req] )
keyword[except] identifier[InvalidMarker] :
identifier[req] = identifier[req] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[dependency] = identifier[dependency_from_pep_508] ( identifier[req] )
keyword[except] identifier[ValueError] :
identifier[self] . identifier[_log] (
literal[string]
literal[string] . identifier[format] ( identifier[req] , identifier[package] . identifier[name] , identifier[package] . identifier[version] ),
identifier[level] = literal[string] ,
)
keyword[continue]
keyword[if] identifier[dependency] . identifier[in_extras] :
keyword[for] identifier[extra] keyword[in] identifier[dependency] . identifier[in_extras] :
keyword[if] identifier[extra] keyword[not] keyword[in] identifier[package] . identifier[extras] :
identifier[package] . identifier[extras] [ identifier[extra] ]=[]
identifier[package] . identifier[extras] [ identifier[extra] ]. identifier[append] ( identifier[dependency] )
keyword[if] keyword[not] identifier[dependency] . identifier[is_optional] ():
identifier[package] . identifier[requires] . identifier[append] ( identifier[dependency] )
identifier[package] . identifier[description] = identifier[release_info] . identifier[get] ( literal[string] , literal[string] )
identifier[package] . identifier[hashes] = identifier[release_info] [ literal[string] ]
keyword[for] identifier[extra] keyword[in] identifier[extras] :
keyword[if] identifier[extra] keyword[in] identifier[package] . identifier[extras] :
keyword[for] identifier[dep] keyword[in] identifier[package] . identifier[extras] [ identifier[extra] ]:
identifier[dep] . identifier[activate] ()
identifier[package] . identifier[requires] += identifier[package] . identifier[extras] [ identifier[extra] ]
identifier[self] . identifier[_packages] . identifier[append] ( identifier[package] )
keyword[return] identifier[package] | def package(self, name, version, extras=None): # type: (...) -> poetry.packages.Package
'\n Retrieve the release information.\n\n This is a heavy task which takes time.\n We have to download a package to get the dependencies.\n We also need to download every file matching this release\n to get the various hashes.\n\n Note that, this will be cached so the subsequent operations\n should be much faster.\n '
try:
index = self._packages.index(poetry.packages.Package(name, version, version))
return self._packages[index] # depends on [control=['try'], data=[]]
except ValueError:
if extras is None:
extras = [] # depends on [control=['if'], data=['extras']]
release_info = self.get_release_info(name, version)
package = poetry.packages.Package(name, version, version)
if release_info['requires_python']:
package.python_versions = release_info['requires_python'] # depends on [control=['if'], data=[]]
package.source_type = 'legacy'
package.source_url = self._url
package.source_reference = self.name
requires_dist = release_info['requires_dist'] or []
for req in requires_dist:
try:
dependency = dependency_from_pep_508(req) # depends on [control=['try'], data=[]]
except InvalidMarker:
# Invalid marker
# We strip the markers hoping for the best
req = req.split(';')[0]
dependency = dependency_from_pep_508(req) # depends on [control=['except'], data=[]]
except ValueError:
# Likely unable to parse constraint so we skip it
self._log('Invalid constraint ({}) found in {}-{} dependencies, skipping'.format(req, package.name, package.version), level='debug')
continue # depends on [control=['except'], data=[]]
if dependency.in_extras:
for extra in dependency.in_extras:
if extra not in package.extras:
package.extras[extra] = [] # depends on [control=['if'], data=['extra']]
package.extras[extra].append(dependency) # depends on [control=['for'], data=['extra']] # depends on [control=['if'], data=[]]
if not dependency.is_optional():
package.requires.append(dependency) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['req']]
# Adding description
package.description = release_info.get('summary', '')
# Adding hashes information
package.hashes = release_info['digests']
# Activate extra dependencies
for extra in extras:
if extra in package.extras:
for dep in package.extras[extra]:
dep.activate() # depends on [control=['for'], data=['dep']]
package.requires += package.extras[extra] # depends on [control=['if'], data=['extra']] # depends on [control=['for'], data=['extra']]
self._packages.append(package)
return package # depends on [control=['except'], data=[]] |
def _commonWordStart(self, words):
"""Get common start of all words.
i.e. for ['blablaxxx', 'blablayyy', 'blazzz'] common start is 'bla'
"""
if not words:
return ''
length = 0
firstWord = words[0]
otherWords = words[1:]
for index, char in enumerate(firstWord):
if not all([word[index] == char for word in otherWords]):
break
length = index + 1
return firstWord[:length] | def function[_commonWordStart, parameter[self, words]]:
constant[Get common start of all words.
i.e. for ['blablaxxx', 'blablayyy', 'blazzz'] common start is 'bla'
]
if <ast.UnaryOp object at 0x7da20c76e6b0> begin[:]
return[constant[]]
variable[length] assign[=] constant[0]
variable[firstWord] assign[=] call[name[words]][constant[0]]
variable[otherWords] assign[=] call[name[words]][<ast.Slice object at 0x7da20c76d9c0>]
for taget[tuple[[<ast.Name object at 0x7da20c76db70>, <ast.Name object at 0x7da20c76e7a0>]]] in starred[call[name[enumerate], parameter[name[firstWord]]]] begin[:]
if <ast.UnaryOp object at 0x7da20c76cac0> begin[:]
break
variable[length] assign[=] binary_operation[name[index] + constant[1]]
return[call[name[firstWord]][<ast.Slice object at 0x7da20c76d2d0>]] | keyword[def] identifier[_commonWordStart] ( identifier[self] , identifier[words] ):
literal[string]
keyword[if] keyword[not] identifier[words] :
keyword[return] literal[string]
identifier[length] = literal[int]
identifier[firstWord] = identifier[words] [ literal[int] ]
identifier[otherWords] = identifier[words] [ literal[int] :]
keyword[for] identifier[index] , identifier[char] keyword[in] identifier[enumerate] ( identifier[firstWord] ):
keyword[if] keyword[not] identifier[all] ([ identifier[word] [ identifier[index] ]== identifier[char] keyword[for] identifier[word] keyword[in] identifier[otherWords] ]):
keyword[break]
identifier[length] = identifier[index] + literal[int]
keyword[return] identifier[firstWord] [: identifier[length] ] | def _commonWordStart(self, words):
"""Get common start of all words.
i.e. for ['blablaxxx', 'blablayyy', 'blazzz'] common start is 'bla'
"""
if not words:
return '' # depends on [control=['if'], data=[]]
length = 0
firstWord = words[0]
otherWords = words[1:]
for (index, char) in enumerate(firstWord):
if not all([word[index] == char for word in otherWords]):
break # depends on [control=['if'], data=[]]
length = index + 1 # depends on [control=['for'], data=[]]
return firstWord[:length] |
def volume_disk_temp_max(self, volume):
"""Maximum temperature of all disks making up the volume"""
volume = self._get_volume(volume)
if volume is not None:
vol_disks = volume["disks"]
if vol_disks is not None:
max_temp = 0
for vol_disk in vol_disks:
disk_temp = self.disk_temp(vol_disk)
if disk_temp is not None and disk_temp > max_temp:
max_temp = disk_temp
return max_temp | def function[volume_disk_temp_max, parameter[self, volume]]:
constant[Maximum temperature of all disks making up the volume]
variable[volume] assign[=] call[name[self]._get_volume, parameter[name[volume]]]
if compare[name[volume] is_not constant[None]] begin[:]
variable[vol_disks] assign[=] call[name[volume]][constant[disks]]
if compare[name[vol_disks] is_not constant[None]] begin[:]
variable[max_temp] assign[=] constant[0]
for taget[name[vol_disk]] in starred[name[vol_disks]] begin[:]
variable[disk_temp] assign[=] call[name[self].disk_temp, parameter[name[vol_disk]]]
if <ast.BoolOp object at 0x7da1b026e0b0> begin[:]
variable[max_temp] assign[=] name[disk_temp]
return[name[max_temp]] | keyword[def] identifier[volume_disk_temp_max] ( identifier[self] , identifier[volume] ):
literal[string]
identifier[volume] = identifier[self] . identifier[_get_volume] ( identifier[volume] )
keyword[if] identifier[volume] keyword[is] keyword[not] keyword[None] :
identifier[vol_disks] = identifier[volume] [ literal[string] ]
keyword[if] identifier[vol_disks] keyword[is] keyword[not] keyword[None] :
identifier[max_temp] = literal[int]
keyword[for] identifier[vol_disk] keyword[in] identifier[vol_disks] :
identifier[disk_temp] = identifier[self] . identifier[disk_temp] ( identifier[vol_disk] )
keyword[if] identifier[disk_temp] keyword[is] keyword[not] keyword[None] keyword[and] identifier[disk_temp] > identifier[max_temp] :
identifier[max_temp] = identifier[disk_temp]
keyword[return] identifier[max_temp] | def volume_disk_temp_max(self, volume):
"""Maximum temperature of all disks making up the volume"""
volume = self._get_volume(volume)
if volume is not None:
vol_disks = volume['disks']
if vol_disks is not None:
max_temp = 0
for vol_disk in vol_disks:
disk_temp = self.disk_temp(vol_disk)
if disk_temp is not None and disk_temp > max_temp:
max_temp = disk_temp # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['vol_disk']]
return max_temp # depends on [control=['if'], data=['vol_disks']] # depends on [control=['if'], data=['volume']] |
def find_parameter(parameters, **kwargs):
    """
    Return the single parameter matching the given criteria.

    Raises ``MultipleParametersFound`` when more than one parameter
    matches and ``NoParameterFound`` when none do.
    """
    matches = filter_parameters(parameters, **kwargs)
    if len(matches) > 1:
        raise MultipleParametersFound()
    if not matches:
        raise NoParameterFound()
    return matches[0]
constant[
Given a list of parameters, find the one with the given name.
]
variable[matching_parameters] assign[=] call[name[filter_parameters], parameter[name[parameters]]]
if compare[call[name[len], parameter[name[matching_parameters]]] equal[==] constant[1]] begin[:]
return[call[name[matching_parameters]][constant[0]]]
<ast.Raise object at 0x7da1b0f0ca90> | keyword[def] identifier[find_parameter] ( identifier[parameters] ,** identifier[kwargs] ):
literal[string]
identifier[matching_parameters] = identifier[filter_parameters] ( identifier[parameters] ,** identifier[kwargs] )
keyword[if] identifier[len] ( identifier[matching_parameters] )== literal[int] :
keyword[return] identifier[matching_parameters] [ literal[int] ]
keyword[elif] identifier[len] ( identifier[matching_parameters] )> literal[int] :
keyword[raise] identifier[MultipleParametersFound] ()
keyword[raise] identifier[NoParameterFound] () | def find_parameter(parameters, **kwargs):
"""
Given a list of parameters, find the one with the given name.
"""
matching_parameters = filter_parameters(parameters, **kwargs)
if len(matching_parameters) == 1:
return matching_parameters[0] # depends on [control=['if'], data=[]]
elif len(matching_parameters) > 1:
raise MultipleParametersFound() # depends on [control=['if'], data=[]]
raise NoParameterFound() |
def update(self, defaults=values.unset):
    """
    Update the DefaultsInstance.

    :param dict defaults: A JSON string that describes the default task links.
    :returns: Updated DefaultsInstance
    :rtype: twilio.rest.autopilot.v1.assistant.defaults.DefaultsInstance
    """
    # Delegate to the context object, which knows how to reach the API.
    return self._proxy.update(defaults=defaults)
constant[
Update the DefaultsInstance
:param dict defaults: A JSON string that describes the default task links.
:returns: Updated DefaultsInstance
:rtype: twilio.rest.autopilot.v1.assistant.defaults.DefaultsInstance
]
return[call[name[self]._proxy.update, parameter[]]] | keyword[def] identifier[update] ( identifier[self] , identifier[defaults] = identifier[values] . identifier[unset] ):
literal[string]
keyword[return] identifier[self] . identifier[_proxy] . identifier[update] ( identifier[defaults] = identifier[defaults] ,) | def update(self, defaults=values.unset):
"""
Update the DefaultsInstance
:param dict defaults: A JSON string that describes the default task links.
:returns: Updated DefaultsInstance
:rtype: twilio.rest.autopilot.v1.assistant.defaults.DefaultsInstance
"""
return self._proxy.update(defaults=defaults) |
def _partial_date_slice(self, resolution, parsed):
    """Select the index positions matching a partially specified date.

    Adapted from pandas.tseries.index.DatetimeIndex._partial_date_slice.

    Note that, unlike with a pandas DatetimeIndex, a partial-date
    selection on a CFTimeIndex that matches a single element is never
    collapsed to a scalar coordinate; the time dimension is always kept.

    Returns a ``slice`` when the index is monotonic (the matches form a
    contiguous run), otherwise an integer array of matching positions.
    """
    start, end = _parsed_string_to_bounds(self.date_type, resolution,
                                          parsed)
    values = self._data
    if self.is_monotonic:
        # Both bounds on the same side of the data: nothing can match.
        if len(values) and ((start < values[0] and end < values[0]) or
                            (start > values[-1] and end > values[-1])):
            raise KeyError
        # Sorted data, so the matching region is a contiguous slice.
        lo = values.searchsorted(start, side='left')
        hi = values.searchsorted(end, side='right')
        return slice(lo, hi)
    in_window = (values >= start) & (values <= end)
    return np.flatnonzero(in_window)
constant[Adapted from
pandas.tseries.index.DatetimeIndex._partial_date_slice
Note that when using a CFTimeIndex, if a partial-date selection
returns a single element, it will never be converted to a scalar
coordinate; this is in slight contrast to the behavior when using
a DatetimeIndex, which sometimes will return a DataArray with a scalar
coordinate depending on the resolution of the datetimes used in
defining the index. For example:
>>> from cftime import DatetimeNoLeap
>>> import pandas as pd
>>> import xarray as xr
>>> da = xr.DataArray([1, 2],
coords=[[DatetimeNoLeap(2001, 1, 1),
DatetimeNoLeap(2001, 2, 1)]],
dims=['time'])
>>> da.sel(time='2001-01-01')
<xarray.DataArray (time: 1)>
array([1])
Coordinates:
* time (time) object 2001-01-01 00:00:00
>>> da = xr.DataArray([1, 2],
coords=[[pd.Timestamp(2001, 1, 1),
pd.Timestamp(2001, 2, 1)]],
dims=['time'])
>>> da.sel(time='2001-01-01')
<xarray.DataArray ()>
array(1)
Coordinates:
time datetime64[ns] 2001-01-01
>>> da = xr.DataArray([1, 2],
coords=[[pd.Timestamp(2001, 1, 1, 1),
pd.Timestamp(2001, 2, 1)]],
dims=['time'])
>>> da.sel(time='2001-01-01')
<xarray.DataArray (time: 1)>
array([1])
Coordinates:
* time (time) datetime64[ns] 2001-01-01T01:00:00
]
<ast.Tuple object at 0x7da20e74bf10> assign[=] call[name[_parsed_string_to_bounds], parameter[name[self].date_type, name[resolution], name[parsed]]]
variable[times] assign[=] name[self]._data
if name[self].is_monotonic begin[:]
if <ast.BoolOp object at 0x7da20e748dc0> begin[:]
<ast.Raise object at 0x7da18c4cdb70>
variable[left] assign[=] call[name[times].searchsorted, parameter[name[start]]]
variable[right] assign[=] call[name[times].searchsorted, parameter[name[end]]]
return[call[name[slice], parameter[name[left], name[right]]]]
variable[lhs_mask] assign[=] compare[name[times] greater_or_equal[>=] name[start]]
variable[rhs_mask] assign[=] compare[name[times] less_or_equal[<=] name[end]]
return[call[name[np].flatnonzero, parameter[binary_operation[name[lhs_mask] <ast.BitAnd object at 0x7da2590d6b60> name[rhs_mask]]]]] | keyword[def] identifier[_partial_date_slice] ( identifier[self] , identifier[resolution] , identifier[parsed] ):
literal[string]
identifier[start] , identifier[end] = identifier[_parsed_string_to_bounds] ( identifier[self] . identifier[date_type] , identifier[resolution] ,
identifier[parsed] )
identifier[times] = identifier[self] . identifier[_data]
keyword[if] identifier[self] . identifier[is_monotonic] :
keyword[if] ( identifier[len] ( identifier[times] ) keyword[and] (( identifier[start] < identifier[times] [ literal[int] ] keyword[and] identifier[end] < identifier[times] [ literal[int] ]) keyword[or]
( identifier[start] > identifier[times] [- literal[int] ] keyword[and] identifier[end] > identifier[times] [- literal[int] ]))):
keyword[raise] identifier[KeyError]
identifier[left] = identifier[times] . identifier[searchsorted] ( identifier[start] , identifier[side] = literal[string] )
identifier[right] = identifier[times] . identifier[searchsorted] ( identifier[end] , identifier[side] = literal[string] )
keyword[return] identifier[slice] ( identifier[left] , identifier[right] )
identifier[lhs_mask] = identifier[times] >= identifier[start]
identifier[rhs_mask] = identifier[times] <= identifier[end]
keyword[return] identifier[np] . identifier[flatnonzero] ( identifier[lhs_mask] & identifier[rhs_mask] ) | def _partial_date_slice(self, resolution, parsed):
"""Adapted from
pandas.tseries.index.DatetimeIndex._partial_date_slice
Note that when using a CFTimeIndex, if a partial-date selection
returns a single element, it will never be converted to a scalar
coordinate; this is in slight contrast to the behavior when using
a DatetimeIndex, which sometimes will return a DataArray with a scalar
coordinate depending on the resolution of the datetimes used in
defining the index. For example:
>>> from cftime import DatetimeNoLeap
>>> import pandas as pd
>>> import xarray as xr
>>> da = xr.DataArray([1, 2],
coords=[[DatetimeNoLeap(2001, 1, 1),
DatetimeNoLeap(2001, 2, 1)]],
dims=['time'])
>>> da.sel(time='2001-01-01')
<xarray.DataArray (time: 1)>
array([1])
Coordinates:
* time (time) object 2001-01-01 00:00:00
>>> da = xr.DataArray([1, 2],
coords=[[pd.Timestamp(2001, 1, 1),
pd.Timestamp(2001, 2, 1)]],
dims=['time'])
>>> da.sel(time='2001-01-01')
<xarray.DataArray ()>
array(1)
Coordinates:
time datetime64[ns] 2001-01-01
>>> da = xr.DataArray([1, 2],
coords=[[pd.Timestamp(2001, 1, 1, 1),
pd.Timestamp(2001, 2, 1)]],
dims=['time'])
>>> da.sel(time='2001-01-01')
<xarray.DataArray (time: 1)>
array([1])
Coordinates:
* time (time) datetime64[ns] 2001-01-01T01:00:00
"""
(start, end) = _parsed_string_to_bounds(self.date_type, resolution, parsed)
times = self._data
if self.is_monotonic:
if len(times) and (start < times[0] and end < times[0] or (start > times[-1] and end > times[-1])):
# we are out of range
raise KeyError # depends on [control=['if'], data=[]]
# a monotonic (sorted) series can be sliced
left = times.searchsorted(start, side='left')
right = times.searchsorted(end, side='right')
return slice(left, right) # depends on [control=['if'], data=[]]
lhs_mask = times >= start
rhs_mask = times <= end
return np.flatnonzero(lhs_mask & rhs_mask) |
def get_template(vm_):
    r'''
    Return the template id for a VM.

    .. versionadded:: 2016.11.0

    vm\_
        The VM dictionary for which to obtain a template.
    '''
    template_name = six.text_type(config.get_cloud_config_value(
        'template', vm_, __opts__, search_global=False
    ))
    # Any missing key (unknown template, or a template without an 'id')
    # is reported to the user as a not-found error.
    try:
        return list_templates()[template_name]['id']
    except KeyError:
        raise SaltCloudNotFound(
            'The specified template, \'{0}\', could not be found.'.format(template_name)
        )
constant[
Return the template id for a VM.
.. versionadded:: 2016.11.0
vm\_
The VM dictionary for which to obtain a template.
]
variable[vm_template] assign[=] call[name[six].text_type, parameter[call[name[config].get_cloud_config_value, parameter[constant[template], name[vm_], name[__opts__]]]]]
<ast.Try object at 0x7da20c7962c0> | keyword[def] identifier[get_template] ( identifier[vm_] ):
literal[string]
identifier[vm_template] = identifier[six] . identifier[text_type] ( identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False]
))
keyword[try] :
keyword[return] identifier[list_templates] ()[ identifier[vm_template] ][ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[SaltCloudNotFound] (
literal[string] . identifier[format] ( identifier[vm_template] )
) | def get_template(vm_):
"""
Return the template id for a VM.
.. versionadded:: 2016.11.0
vm\\_
The VM dictionary for which to obtain a template.
"""
vm_template = six.text_type(config.get_cloud_config_value('template', vm_, __opts__, search_global=False))
try:
return list_templates()[vm_template]['id'] # depends on [control=['try'], data=[]]
except KeyError:
raise SaltCloudNotFound("The specified template, '{0}', could not be found.".format(vm_template)) # depends on [control=['except'], data=[]] |
def get_sectionsf(self, *args, **kwargs):
    """
    Decorator method to extract sections from a function docstring

    Parameters
    ----------
    ``*args`` and ``**kwargs``
        See the :meth:`get_sections` method. Note, that the first argument
        will be the docstring of the specified function

    Returns
    -------
    function
        Wrapper that takes a function as input and registers its sections
        via the :meth:`get_sections` method
    """
    def decorator(func_to_document):
        # Register the docstring (an empty string when there is none)
        # and hand the function back unchanged.
        self.get_sections(func_to_document.__doc__ or '', *args, **kwargs)
        return func_to_document
    return decorator
constant[
Decorator method to extract sections from a function docstring
Parameters
----------
``*args`` and ``**kwargs``
See the :meth:`get_sections` method. Note, that the first argument
will be the docstring of the specified function
Returns
-------
function
Wrapper that takes a function as input and registers its sections
via the :meth:`get_sections` method]
def function[func, parameter[f]]:
variable[doc] assign[=] name[f].__doc__
call[name[self].get_sections, parameter[<ast.BoolOp object at 0x7da20c991d20>, <ast.Starred object at 0x7da20c9926e0>]]
return[name[f]]
return[name[func]] | keyword[def] identifier[get_sectionsf] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[func] ( identifier[f] ):
identifier[doc] = identifier[f] . identifier[__doc__]
identifier[self] . identifier[get_sections] ( identifier[doc] keyword[or] literal[string] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[f]
keyword[return] identifier[func] | def get_sectionsf(self, *args, **kwargs):
"""
Decorator method to extract sections from a function docstring
Parameters
----------
``*args`` and ``**kwargs``
See the :meth:`get_sections` method. Note, that the first argument
will be the docstring of the specified function
Returns
-------
function
Wrapper that takes a function as input and registers its sections
via the :meth:`get_sections` method"""
def func(f):
doc = f.__doc__
self.get_sections(doc or '', *args, **kwargs)
return f
return func |
def reset(self, ms=0, halt=True):
    """Resets the target.

    This method resets the target, and by default toggles the RESET and
    TRST pins.

    Args:
      self (JLink): the ``JLink`` instance
      ms (int): Amount of milliseconds to delay after reset (default: 0)
      halt (bool): if the CPU should halt after reset (default: True)

    Returns:
      The non-negative integer result of the DLL reset call (the
      original docstring's "Number of bytes read" was a copy-paste
      error; no bytes are read here).

    Raises:
      JLinkException: if the DLL reset call returns a negative error code.
    """
    self._dll.JLINKARM_SetResetDelay(ms)
    res = self._dll.JLINKARM_Reset()
    if res < 0:
        raise errors.JLinkException(res)
    if not halt:
        # Resume the CPU when the caller does not want it halted
        # after the reset.
        self._dll.JLINKARM_Go()
    return res
constant[Resets the target.
This method resets the target, and by default toggles the RESET and
TRST pins.
Args:
self (JLink): the ``JLink`` instance
ms (int): Amount of milliseconds to delay after reset (default: 0)
halt (bool): if the CPU should halt after reset (default: True)
Returns:
Number of bytes read.
]
call[name[self]._dll.JLINKARM_SetResetDelay, parameter[name[ms]]]
variable[res] assign[=] call[name[self]._dll.JLINKARM_Reset, parameter[]]
if compare[name[res] less[<] constant[0]] begin[:]
<ast.Raise object at 0x7da1b26ace20>
return[name[res]] | keyword[def] identifier[reset] ( identifier[self] , identifier[ms] = literal[int] , identifier[halt] = keyword[True] ):
literal[string]
identifier[self] . identifier[_dll] . identifier[JLINKARM_SetResetDelay] ( identifier[ms] )
identifier[res] = identifier[self] . identifier[_dll] . identifier[JLINKARM_Reset] ()
keyword[if] identifier[res] < literal[int] :
keyword[raise] identifier[errors] . identifier[JLinkException] ( identifier[res] )
keyword[elif] keyword[not] identifier[halt] :
identifier[self] . identifier[_dll] . identifier[JLINKARM_Go] ()
keyword[return] identifier[res] | def reset(self, ms=0, halt=True):
"""Resets the target.
This method resets the target, and by default toggles the RESET and
TRST pins.
Args:
self (JLink): the ``JLink`` instance
ms (int): Amount of milliseconds to delay after reset (default: 0)
halt (bool): if the CPU should halt after reset (default: True)
Returns:
Number of bytes read.
"""
self._dll.JLINKARM_SetResetDelay(ms)
res = self._dll.JLINKARM_Reset()
if res < 0:
raise errors.JLinkException(res) # depends on [control=['if'], data=['res']]
elif not halt:
self._dll.JLINKARM_Go() # depends on [control=['if'], data=[]]
return res |
def _shape(self):
""" Returns the shape of the data array associated with this file."""
hdu = self.open()
_shape = hdu.shape
if not self.inmemory:
self.close()
del hdu
return _shape | def function[_shape, parameter[self]]:
constant[ Returns the shape of the data array associated with this file.]
variable[hdu] assign[=] call[name[self].open, parameter[]]
variable[_shape] assign[=] name[hdu].shape
if <ast.UnaryOp object at 0x7da1b0edd4b0> begin[:]
call[name[self].close, parameter[]]
<ast.Delete object at 0x7da1b0edcc40>
return[name[_shape]] | keyword[def] identifier[_shape] ( identifier[self] ):
literal[string]
identifier[hdu] = identifier[self] . identifier[open] ()
identifier[_shape] = identifier[hdu] . identifier[shape]
keyword[if] keyword[not] identifier[self] . identifier[inmemory] :
identifier[self] . identifier[close] ()
keyword[del] identifier[hdu]
keyword[return] identifier[_shape] | def _shape(self):
""" Returns the shape of the data array associated with this file."""
hdu = self.open()
_shape = hdu.shape
if not self.inmemory:
self.close()
del hdu # depends on [control=['if'], data=[]]
return _shape |
def _error_on_missing_application(self, params):
    """Raise an ApplicationNotFoundError if the app is not accessible.

    Looks for ``self._command`` either as an existing path or on the
    system path (usually $PATH). If it is found in neither place, an
    ApplicationNotFoundError is raised to inform the user that the
    application they are trying to access is not available.

    Override this method when ``self._command`` does not represent the
    relevant executable (e.g. ``self._command = 'prog -a'``), in more
    complex cases where the file to be executed is passed as a parameter
    (e.g. java jar files passed via '-jar'), or to bypass the presence
    test entirely by never raising.
    """
    command = self._command
    if exists(command):
        return
    # Strip off " characters, in case we got a FilePath object, before
    # searching the system path.
    if which(command.strip('"')) is not None:
        return
    raise ApplicationNotFoundError("Cannot find %s. Is it installed? "
                                   "Is it in your path?" % command)
constant[ Raise an ApplicationNotFoundError if the app is not accessible
This method checks in the system path (usually $PATH) or for
the existence of self._command. If self._command is not found
in either place, an ApplicationNotFoundError is raised to
inform the user that the application they are trying to access is
not available.
This method should be overwritten when self._command does not
represent the relevant executable (e.g., self._command = 'prog -a')
or in more complex cases where the file to be executed may be
passed as a parameter (e.g., with java jar files, where the
jar file is passed to java via '-jar'). It can also be overwritten
to by-pass testing for application presence by never raising an
error.
]
variable[command] assign[=] name[self]._command
variable[found_in_path] assign[=] compare[call[name[which], parameter[call[name[command].strip, parameter[constant["]]]]] is_not constant[None]]
if <ast.UnaryOp object at 0x7da1b0bd93f0> begin[:]
<ast.Raise object at 0x7da1b0b72770> | keyword[def] identifier[_error_on_missing_application] ( identifier[self] , identifier[params] ):
literal[string]
identifier[command] = identifier[self] . identifier[_command]
identifier[found_in_path] = identifier[which] ( identifier[command] . identifier[strip] ( literal[string] )) keyword[is] keyword[not] keyword[None]
keyword[if] keyword[not] ( identifier[exists] ( identifier[command] ) keyword[or] identifier[found_in_path] ):
keyword[raise] identifier[ApplicationNotFoundError] ( literal[string]
literal[string] % identifier[command] ) | def _error_on_missing_application(self, params):
""" Raise an ApplicationNotFoundError if the app is not accessible
This method checks in the system path (usually $PATH) or for
the existence of self._command. If self._command is not found
in either place, an ApplicationNotFoundError is raised to
inform the user that the application they are trying to access is
not available.
This method should be overwritten when self._command does not
represent the relevant executable (e.g., self._command = 'prog -a')
or in more complex cases where the file to be executed may be
passed as a parameter (e.g., with java jar files, where the
jar file is passed to java via '-jar'). It can also be overwritten
to by-pass testing for application presence by never raising an
error.
"""
command = self._command
# strip off " characters, in case we got a FilePath object
found_in_path = which(command.strip('"')) is not None
if not (exists(command) or found_in_path):
raise ApplicationNotFoundError('Cannot find %s. Is it installed? Is it in your path?' % command) # depends on [control=['if'], data=[]] |
def terminate(self):
    """Stop the standalone manager."""
    message = __(
        "Terminating Resolwe listener on channel '{}'.",
        state.MANAGER_EXECUTOR_CHANNELS.queue
    )
    logger.info(message)
    # The listener loop polls this flag and exits when it is set.
    self._should_stop = True
self._should_stop = True | def function[terminate, parameter[self]]:
constant[Stop the standalone manager.]
call[name[logger].info, parameter[call[name[__], parameter[constant[Terminating Resolwe listener on channel '{}'.], name[state].MANAGER_EXECUTOR_CHANNELS.queue]]]]
name[self]._should_stop assign[=] constant[True] | keyword[def] identifier[terminate] ( identifier[self] ):
literal[string]
identifier[logger] . identifier[info] ( identifier[__] (
literal[string] ,
identifier[state] . identifier[MANAGER_EXECUTOR_CHANNELS] . identifier[queue]
))
identifier[self] . identifier[_should_stop] = keyword[True] | def terminate(self):
"""Stop the standalone manager."""
logger.info(__("Terminating Resolwe listener on channel '{}'.", state.MANAGER_EXECUTOR_CHANNELS.queue))
self._should_stop = True |
def set_APSR_flag_to_value(self, flag, value):
    """
    Set or clear a flag in the APSR.

    :param flag: The flag to set, one of 'N', 'Z', 'C' or 'V'
    :param value: If value evaluates to true the flag is set, cleared otherwise
    :raises AttributeError: if ``flag`` is not a valid APSR flag name
    :return: None
    """
    # Dispatch table instead of an if/elif ladder; bit positions follow
    # the ARM APSR layout used by the original code.
    flag_bits = {'N': 31, 'Z': 30, 'C': 29, 'V': 28}
    try:
        bit = flag_bits[flag]
    except KeyError:
        raise AttributeError("Flag {} does not exist in the APSR".format(flag))
    mask = 1 << bit
    if value:
        self.register['APSR'] |= mask
    else:
        # Clearing with ``&= ~mask`` is a no-op when the flag is already
        # clear, matching the original conditional subtraction.
        self.register['APSR'] &= ~mask
constant[
Set or clear flag in ASPR
:param flag: The flag to set
:param value: If value evaulates to true, it is set, cleared otherwise
:return:
]
if compare[name[flag] equal[==] constant[N]] begin[:]
variable[bit] assign[=] constant[31]
if name[value] begin[:]
<ast.AugAssign object at 0x7da20e957c10> | keyword[def] identifier[set_APSR_flag_to_value] ( identifier[self] , identifier[flag] , identifier[value] ):
literal[string]
keyword[if] identifier[flag] == literal[string] :
identifier[bit] = literal[int]
keyword[elif] identifier[flag] == literal[string] :
identifier[bit] = literal[int]
keyword[elif] identifier[flag] == literal[string] :
identifier[bit] = literal[int]
keyword[elif] identifier[flag] == literal[string] :
identifier[bit] = literal[int]
keyword[else] :
keyword[raise] identifier[AttributeError] ( literal[string] . identifier[format] ( identifier[flag] ))
keyword[if] identifier[value] :
identifier[self] . identifier[register] [ literal[string] ]|=( literal[int] << identifier[bit] )
keyword[else] :
identifier[self] . identifier[register] [ literal[string] ]-=( literal[int] << identifier[bit] ) keyword[if] ( identifier[self] . identifier[register] [ literal[string] ]&( literal[int] << identifier[bit] )) keyword[else] literal[int] | def set_APSR_flag_to_value(self, flag, value):
"""
Set or clear flag in ASPR
:param flag: The flag to set
:param value: If value evaulates to true, it is set, cleared otherwise
:return:
"""
if flag == 'N':
bit = 31 # depends on [control=['if'], data=[]]
elif flag == 'Z':
bit = 30 # depends on [control=['if'], data=[]]
elif flag == 'C':
bit = 29 # depends on [control=['if'], data=[]]
elif flag == 'V':
bit = 28 # depends on [control=['if'], data=[]]
else:
raise AttributeError('Flag {} does not exist in the APSR'.format(flag))
if value:
self.register['APSR'] |= 1 << bit # depends on [control=['if'], data=[]]
else:
self.register['APSR'] -= 1 << bit if self.register['APSR'] & 1 << bit else 0 |
def use_plenary_bin_view(self):
    """Pass through to provider ResourceBinSession.use_plenary_bin_view"""
    self._bin_view = PLENARY
    # Forward the view change to every underlying provider session;
    # sessions that do not expose the method are simply skipped.
    for provider_session in self._get_provider_sessions():
        try:
            provider_session.use_plenary_bin_view()
        except AttributeError:
            continue
constant[Pass through to provider ResourceBinSession.use_plenary_bin_view]
name[self]._bin_view assign[=] name[PLENARY]
for taget[name[session]] in starred[call[name[self]._get_provider_sessions, parameter[]]] begin[:]
<ast.Try object at 0x7da1b0a65300> | keyword[def] identifier[use_plenary_bin_view] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_bin_view] = identifier[PLENARY]
keyword[for] identifier[session] keyword[in] identifier[self] . identifier[_get_provider_sessions] ():
keyword[try] :
identifier[session] . identifier[use_plenary_bin_view] ()
keyword[except] identifier[AttributeError] :
keyword[pass] | def use_plenary_bin_view(self):
"""Pass through to provider ResourceBinSession.use_plenary_bin_view"""
self._bin_view = PLENARY
# self._get_provider_session('resource_bin_session') # To make sure the session is tracked
for session in self._get_provider_sessions():
try:
session.use_plenary_bin_view() # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['session']] |
def round(self, value_array):
    """
    Round a one-hot encoded categorical variable.

    The entry holding the maximum of ``value_array`` is set to one and
    every other entry to zero. Assumes a 1x[number of categories] array
    (due to one-hot encoding) as input.
    """
    winner = np.argmax(value_array)
    # np.zeros (not zeros_like) keeps the original float output dtype
    # regardless of the input dtype.
    one_hot = np.zeros(value_array.shape)
    one_hot[winner] = 1
    return one_hot
constant[
Rounds a categorical variable by setting to one the max of the given vector and to zero the rest of the entries.
Assumes an 1x[number of categories] array (due to one-hot encoding) as an input
]
variable[rounded_values] assign[=] call[name[np].zeros, parameter[name[value_array].shape]]
call[name[rounded_values]][call[name[np].argmax, parameter[name[value_array]]]] assign[=] constant[1]
return[name[rounded_values]] | keyword[def] identifier[round] ( identifier[self] , identifier[value_array] ):
literal[string]
identifier[rounded_values] = identifier[np] . identifier[zeros] ( identifier[value_array] . identifier[shape] )
identifier[rounded_values] [ identifier[np] . identifier[argmax] ( identifier[value_array] )]= literal[int]
keyword[return] identifier[rounded_values] | def round(self, value_array):
"""
Rounds a categorical variable by setting to one the max of the given vector and to zero the rest of the entries.
Assumes an 1x[number of categories] array (due to one-hot encoding) as an input
"""
rounded_values = np.zeros(value_array.shape)
rounded_values[np.argmax(value_array)] = 1
return rounded_values |
def parse_blockwise(value):
    """
    Parse Blockwise option.

    :param value: option value
    :return: num, m, size
    """
    length = byte_len(value)
    # The option packs NUM into the high bits, M into bit 3 and SZX into
    # the low three bits; the masks only differ in how wide NUM may be
    # for the 1-, 2- and 3-byte encodings.
    if length == 1:
        num_mask, m_mask, szx_mask = 0xF0, 0x08, 0x07
    elif length == 2:
        num_mask, m_mask, szx_mask = 0xFFF0, 0x0008, 0x0007
    else:
        num_mask, m_mask, szx_mask = 0xFFFFF0, 0x000008, 0x000007
    num = (value & num_mask) >> 4
    m = (value & m_mask) >> 3
    szx = value & szx_mask
    return num, int(m), pow(2, szx + 4)
constant[
Parse Blockwise option.
:param value: option value
:return: num, m, size
]
variable[length] assign[=] call[name[byte_len], parameter[name[value]]]
if compare[name[length] equal[==] constant[1]] begin[:]
variable[num] assign[=] binary_operation[name[value] <ast.BitAnd object at 0x7da2590d6b60> constant[240]]
<ast.AugAssign object at 0x7da207f033a0>
variable[m] assign[=] binary_operation[name[value] <ast.BitAnd object at 0x7da2590d6b60> constant[8]]
<ast.AugAssign object at 0x7da207f02980>
variable[size] assign[=] binary_operation[name[value] <ast.BitAnd object at 0x7da2590d6b60> constant[7]]
return[tuple[[<ast.Name object at 0x7da1b06d2800>, <ast.Call object at 0x7da1b06d2ad0>, <ast.Call object at 0x7da1b06d1b10>]]] | keyword[def] identifier[parse_blockwise] ( identifier[value] ):
literal[string]
identifier[length] = identifier[byte_len] ( identifier[value] )
keyword[if] identifier[length] == literal[int] :
identifier[num] = identifier[value] & literal[int]
identifier[num] >>= literal[int]
identifier[m] = identifier[value] & literal[int]
identifier[m] >>= literal[int]
identifier[size] = identifier[value] & literal[int]
keyword[elif] identifier[length] == literal[int] :
identifier[num] = identifier[value] & literal[int]
identifier[num] >>= literal[int]
identifier[m] = identifier[value] & literal[int]
identifier[m] >>= literal[int]
identifier[size] = identifier[value] & literal[int]
keyword[else] :
identifier[num] = identifier[value] & literal[int]
identifier[num] >>= literal[int]
identifier[m] = identifier[value] & literal[int]
identifier[m] >>= literal[int]
identifier[size] = identifier[value] & literal[int]
keyword[return] identifier[num] , identifier[int] ( identifier[m] ), identifier[pow] ( literal[int] ,( identifier[size] + literal[int] )) | def parse_blockwise(value):
"""
Parse Blockwise option.
:param value: option value
:return: num, m, size
"""
length = byte_len(value)
if length == 1:
num = value & 240
num >>= 4
m = value & 8
m >>= 3
size = value & 7 # depends on [control=['if'], data=[]]
elif length == 2:
num = value & 65520
num >>= 4
m = value & 8
m >>= 3
size = value & 7 # depends on [control=['if'], data=[]]
else:
num = value & 16777200
num >>= 4
m = value & 8
m >>= 3
size = value & 7
return (num, int(m), pow(2, size + 4)) |
def get_mentions(status_dict, exclude=[]):
    """
    Given a status dictionary, return all people mentioned in the toot,
    excluding those in the list passed in exclude.

    Exclusion is case-insensitive and a leading "@" on an excluded name
    is ignored. Unlike before, the ``exclude`` argument is never
    modified (the old code rewrote it in place, which both surprised
    callers and made the mutable default argument shared across calls).
    """
    # Canonicalise the exclusions into a local set: lowercase, without a
    # leading @. startswith() also tolerates empty strings, which the
    # old ``user[0]`` indexing crashed on.
    excluded = set()
    for name in exclude:
        name = name.casefold()
        if name.startswith("@"):
            name = name[1:]
        excluded.add(name)
    return [user["username"] for user in status_dict["mentions"]
            if user["username"].casefold() not in excluded]
constant[
Given a status dictionary, return all people mentioned in the toot,
excluding those in the list passed in exclude.
]
for taget[tuple[[<ast.Name object at 0x7da1b05059f0>, <ast.Name object at 0x7da1b0506110>]]] in starred[call[name[enumerate], parameter[name[exclude]]]] begin[:]
variable[user] assign[=] call[name[user].casefold, parameter[]]
if compare[call[name[user]][constant[0]] equal[==] constant[@]] begin[:]
variable[user] assign[=] call[name[user]][<ast.Slice object at 0x7da1b0507280>]
call[name[exclude]][name[i]] assign[=] name[user]
variable[users] assign[=] <ast.ListComp object at 0x7da1b0505f90>
return[name[users]] | keyword[def] identifier[get_mentions] ( identifier[status_dict] , identifier[exclude] =[]):
literal[string]
keyword[for] identifier[i] , identifier[user] keyword[in] identifier[enumerate] ( identifier[exclude] ):
identifier[user] = identifier[user] . identifier[casefold] ()
keyword[if] identifier[user] [ literal[int] ]== literal[string] :
identifier[user] = identifier[user] [ literal[int] :]
identifier[exclude] [ identifier[i] ]= identifier[user]
identifier[users] =[ identifier[user] [ literal[string] ] keyword[for] identifier[user] keyword[in] identifier[status_dict] [ literal[string] ]
keyword[if] identifier[user] [ literal[string] ]. identifier[casefold] () keyword[not] keyword[in] identifier[exclude] ]
keyword[return] identifier[users] | def get_mentions(status_dict, exclude=[]):
"""
Given a status dictionary, return all people mentioned in the toot,
excluding those in the list passed in exclude.
"""
# Canonicalise the exclusion dictionary by lowercasing all names and
# removing leading @'s
for (i, user) in enumerate(exclude):
user = user.casefold()
if user[0] == '@':
user = user[1:] # depends on [control=['if'], data=[]]
exclude[i] = user # depends on [control=['for'], data=[]]
users = [user['username'] for user in status_dict['mentions'] if user['username'].casefold() not in exclude]
return users |
def scrypt_mcf(password, salt=None, N=SCRYPT_N, r=SCRYPT_r, p=SCRYPT_p,
               prefix=SCRYPT_MCF_PREFIX_DEFAULT):
    """Derives a Modular Crypt Format hash using the scrypt KDF
    Parameter space is smaller than for scrypt():
    N must be a power of two larger than 1 but no larger than 2 ** 31
    r and p must be positive numbers between 1 and 255
    Salt must be a byte string 1-16 bytes long.
    If no salt is given, a random salt of 128+ bits is used. (Recommended.)
    """
    # Delegate to the shared MCF helper, binding this module's scrypt()
    # as the KDF backend so hashing and MCF encoding stay consistent.
    return mcf_mod.scrypt_mcf(scrypt, password, salt, N, r, p, prefix)
constant[Derives a Modular Crypt Format hash using the scrypt KDF
Parameter space is smaller than for scrypt():
N must be a power of two larger than 1 but no larger than 2 ** 31
r and p must be positive numbers between 1 and 255
Salt must be a byte string 1-16 bytes long.
If no salt is given, a random salt of 128+ bits is used. (Recommended.)
]
return[call[name[mcf_mod].scrypt_mcf, parameter[name[scrypt], name[password], name[salt], name[N], name[r], name[p], name[prefix]]]] | keyword[def] identifier[scrypt_mcf] ( identifier[password] , identifier[salt] = keyword[None] , identifier[N] = identifier[SCRYPT_N] , identifier[r] = identifier[SCRYPT_r] , identifier[p] = identifier[SCRYPT_p] ,
identifier[prefix] = identifier[SCRYPT_MCF_PREFIX_DEFAULT] ):
literal[string]
keyword[return] identifier[mcf_mod] . identifier[scrypt_mcf] ( identifier[scrypt] , identifier[password] , identifier[salt] , identifier[N] , identifier[r] , identifier[p] , identifier[prefix] ) | def scrypt_mcf(password, salt=None, N=SCRYPT_N, r=SCRYPT_r, p=SCRYPT_p, prefix=SCRYPT_MCF_PREFIX_DEFAULT):
"""Derives a Modular Crypt Format hash using the scrypt KDF
Parameter space is smaller than for scrypt():
N must be a power of two larger than 1 but no larger than 2 ** 31
r and p must be positive numbers between 1 and 255
Salt must be a byte string 1-16 bytes long.
If no salt is given, a random salt of 128+ bits is used. (Recommended.)
"""
return mcf_mod.scrypt_mcf(scrypt, password, salt, N, r, p, prefix) |
def allskyfinder(self, figsize=(14, 7), **kwargs):
    """Draw an all-sky finder chart.

    Creates a brand-new matplotlib figure spanning the full RA/Dec range,
    scatters this object's positions onto it, and returns the scatter
    artist produced by ``self.plot``.
    """
    plt.figure(figsize=figsize)
    points = self.plot(**kwargs)
    plt.xlabel(r'Right Ascension ($^\circ$)')
    plt.ylabel(r'Declination ($^\circ$)')
    # Fix the axes to the whole celestial sphere.
    plt.xlim(0, 360)
    plt.ylim(-90, 90)
    return points
constant[
Plot an all-sky finder chart. This *does* create a new figure.
]
call[name[plt].figure, parameter[]]
variable[scatter] assign[=] call[name[self].plot, parameter[]]
call[name[plt].xlabel, parameter[constant[Right Ascension ($^\circ$)]]]
call[name[plt].ylabel, parameter[constant[Declination ($^\circ$)]]]
call[name[plt].xlim, parameter[constant[0], constant[360]]]
call[name[plt].ylim, parameter[<ast.UnaryOp object at 0x7da18fe927a0>, constant[90]]]
return[name[scatter]] | keyword[def] identifier[allskyfinder] ( identifier[self] , identifier[figsize] =( literal[int] , literal[int] ),** identifier[kwargs] ):
literal[string]
identifier[plt] . identifier[figure] ( identifier[figsize] = identifier[figsize] )
identifier[scatter] = identifier[self] . identifier[plot] (** identifier[kwargs] )
identifier[plt] . identifier[xlabel] ( literal[string] ); identifier[plt] . identifier[ylabel] ( literal[string] )
identifier[plt] . identifier[xlim] ( literal[int] , literal[int] )
identifier[plt] . identifier[ylim] (- literal[int] , literal[int] )
keyword[return] identifier[scatter] | def allskyfinder(self, figsize=(14, 7), **kwargs):
"""
Plot an all-sky finder chart. This *does* create a new figure.
"""
plt.figure(figsize=figsize)
scatter = self.plot(**kwargs)
plt.xlabel('Right Ascension ($^\\circ$)')
plt.ylabel('Declination ($^\\circ$)')
#plt.title('{} in {:.1f}'.format(self.name, epoch))
plt.xlim(0, 360)
plt.ylim(-90, 90)
return scatter |
def http_session(self):
    """HTTP Session property

    Lazily constructs one shared session on first access and reuses it
    for all subsequent calls.

    :return: vk_requests.utils.VerboseHTTPSession instance
    """
    if self._http_session is None:
        self._http_session = VerboseHTTPSession()
        self._http_session.headers.update(self.DEFAULT_HTTP_HEADERS)
    return self._http_session
constant[HTTP Session property
:return: vk_requests.utils.VerboseHTTPSession instance
]
if compare[name[self]._http_session is constant[None]] begin[:]
variable[session] assign[=] call[name[VerboseHTTPSession], parameter[]]
call[name[session].headers.update, parameter[name[self].DEFAULT_HTTP_HEADERS]]
name[self]._http_session assign[=] name[session]
return[name[self]._http_session] | keyword[def] identifier[http_session] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_http_session] keyword[is] keyword[None] :
identifier[session] = identifier[VerboseHTTPSession] ()
identifier[session] . identifier[headers] . identifier[update] ( identifier[self] . identifier[DEFAULT_HTTP_HEADERS] )
identifier[self] . identifier[_http_session] = identifier[session]
keyword[return] identifier[self] . identifier[_http_session] | def http_session(self):
"""HTTP Session property
:return: vk_requests.utils.VerboseHTTPSession instance
"""
if self._http_session is None:
session = VerboseHTTPSession()
session.headers.update(self.DEFAULT_HTTP_HEADERS)
self._http_session = session # depends on [control=['if'], data=[]]
return self._http_session |
def align_transcriptome(fastq_file, pair_file, ref_file, data):
    """
    bwa mem with settings for aligning to the transcriptome for eXpress/RSEM/etc

    Arguments:
        fastq_file: path to the first (or only) FASTQ file.
        pair_file: path to the mate FASTQ file; falsy for single-end data.
        ref_file: path to the reference FASTA file.
        data: bcbio sample dictionary carrying configuration and file state.

    Returns:
        the ``data`` dictionary updated with the transcriptome BAM path.
    """
    work_bam = dd.get_work_bam(data)
    base, ext = os.path.splitext(work_bam)
    out_file = base + ".transcriptome" + ext
    if utils.file_exists(out_file):
        # Output already produced by a previous run; just record it.
        data = dd.set_transcriptome_bam(data, out_file)
        return data
    # bwa mem needs phred+33 quality, so convert if it is Illumina (phred+64)
    if dd.get_quality_format(data).lower() == "illumina":
        # Fix: the original call passed no arguments for the two %s
        # placeholders, so the literal "%s" appeared in the log message.
        logger.info("bwa mem does not support the phred+64 quality format, "
                    "converting %s and %s to phred+33.", fastq_file, pair_file)
        fastq_file = fastq.groom(fastq_file, data, in_qual="fastq-illumina")
        if pair_file:
            pair_file = fastq.groom(pair_file, data, in_qual="fastq-illumina")
    bwa = config_utils.get_program("bwa", data["config"])
    gtf_file = dd.get_gtf_file(data)
    gtf_fasta = index_transcriptome(gtf_file, ref_file, data)
    args = " ".join(_bwa_args_from_config(data["config"]))
    num_cores = data["config"]["algorithm"].get("num_cores", 1)
    # -a reports all alignments, which transcript quantifiers require.
    cmd = ("{bwa} mem {args} -a -t {num_cores} {gtf_fasta} {fastq_file} "
           "{pair_file} ")
    with file_transaction(data, out_file) as tx_out_file:
        message = "Aligning %s and %s to the transcriptome." % (fastq_file, pair_file)
        cmd += "| " + postalign.sam_to_sortbam_cl(data, tx_out_file, name_sort=True)
        do.run(cmd.format(**locals()), message)
    data = dd.set_transcriptome_bam(data, out_file)
    return data
constant[
bwa mem with settings for aligning to the transcriptome for eXpress/RSEM/etc
]
variable[work_bam] assign[=] call[name[dd].get_work_bam, parameter[name[data]]]
<ast.Tuple object at 0x7da18bcc9a80> assign[=] call[name[os].path.splitext, parameter[name[work_bam]]]
variable[out_file] assign[=] binary_operation[binary_operation[name[base] + constant[.transcriptome]] + name[ext]]
if call[name[utils].file_exists, parameter[name[out_file]]] begin[:]
variable[data] assign[=] call[name[dd].set_transcriptome_bam, parameter[name[data], name[out_file]]]
return[name[data]]
if compare[call[call[name[dd].get_quality_format, parameter[name[data]]].lower, parameter[]] equal[==] constant[illumina]] begin[:]
call[name[logger].info, parameter[constant[bwa mem does not support the phred+64 quality format, converting %s and %s to phred+33.]]]
variable[fastq_file] assign[=] call[name[fastq].groom, parameter[name[fastq_file], name[data]]]
if name[pair_file] begin[:]
variable[pair_file] assign[=] call[name[fastq].groom, parameter[name[pair_file], name[data]]]
variable[bwa] assign[=] call[name[config_utils].get_program, parameter[constant[bwa], call[name[data]][constant[config]]]]
variable[gtf_file] assign[=] call[name[dd].get_gtf_file, parameter[name[data]]]
variable[gtf_fasta] assign[=] call[name[index_transcriptome], parameter[name[gtf_file], name[ref_file], name[data]]]
variable[args] assign[=] call[constant[ ].join, parameter[call[name[_bwa_args_from_config], parameter[call[name[data]][constant[config]]]]]]
variable[num_cores] assign[=] call[call[call[name[data]][constant[config]]][constant[algorithm]].get, parameter[constant[num_cores], constant[1]]]
variable[samtools] assign[=] call[name[config_utils].get_program, parameter[constant[samtools], call[name[data]][constant[config]]]]
variable[cmd] assign[=] constant[{bwa} mem {args} -a -t {num_cores} {gtf_fasta} {fastq_file} {pair_file} ]
with call[name[file_transaction], parameter[name[data], name[out_file]]] begin[:]
variable[message] assign[=] binary_operation[constant[Aligning %s and %s to the transcriptome.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bccad40>, <ast.Name object at 0x7da18bccb2e0>]]]
<ast.AugAssign object at 0x7da18bccb7c0>
call[name[do].run, parameter[call[name[cmd].format, parameter[]], name[message]]]
variable[data] assign[=] call[name[dd].set_transcriptome_bam, parameter[name[data], name[out_file]]]
return[name[data]] | keyword[def] identifier[align_transcriptome] ( identifier[fastq_file] , identifier[pair_file] , identifier[ref_file] , identifier[data] ):
literal[string]
identifier[work_bam] = identifier[dd] . identifier[get_work_bam] ( identifier[data] )
identifier[base] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[work_bam] )
identifier[out_file] = identifier[base] + literal[string] + identifier[ext]
keyword[if] identifier[utils] . identifier[file_exists] ( identifier[out_file] ):
identifier[data] = identifier[dd] . identifier[set_transcriptome_bam] ( identifier[data] , identifier[out_file] )
keyword[return] identifier[data]
keyword[if] identifier[dd] . identifier[get_quality_format] ( identifier[data] ). identifier[lower] ()== literal[string] :
identifier[logger] . identifier[info] ( literal[string]
literal[string] )
identifier[fastq_file] = identifier[fastq] . identifier[groom] ( identifier[fastq_file] , identifier[data] , identifier[in_qual] = literal[string] )
keyword[if] identifier[pair_file] :
identifier[pair_file] = identifier[fastq] . identifier[groom] ( identifier[pair_file] , identifier[data] , identifier[in_qual] = literal[string] )
identifier[bwa] = identifier[config_utils] . identifier[get_program] ( literal[string] , identifier[data] [ literal[string] ])
identifier[gtf_file] = identifier[dd] . identifier[get_gtf_file] ( identifier[data] )
identifier[gtf_fasta] = identifier[index_transcriptome] ( identifier[gtf_file] , identifier[ref_file] , identifier[data] )
identifier[args] = literal[string] . identifier[join] ( identifier[_bwa_args_from_config] ( identifier[data] [ literal[string] ]))
identifier[num_cores] = identifier[data] [ literal[string] ][ literal[string] ]. identifier[get] ( literal[string] , literal[int] )
identifier[samtools] = identifier[config_utils] . identifier[get_program] ( literal[string] , identifier[data] [ literal[string] ])
identifier[cmd] =( literal[string]
literal[string] )
keyword[with] identifier[file_transaction] ( identifier[data] , identifier[out_file] ) keyword[as] identifier[tx_out_file] :
identifier[message] = literal[string] %( identifier[fastq_file] , identifier[pair_file] )
identifier[cmd] += literal[string] + identifier[postalign] . identifier[sam_to_sortbam_cl] ( identifier[data] , identifier[tx_out_file] , identifier[name_sort] = keyword[True] )
identifier[do] . identifier[run] ( identifier[cmd] . identifier[format] (** identifier[locals] ()), identifier[message] )
identifier[data] = identifier[dd] . identifier[set_transcriptome_bam] ( identifier[data] , identifier[out_file] )
keyword[return] identifier[data] | def align_transcriptome(fastq_file, pair_file, ref_file, data):
"""
bwa mem with settings for aligning to the transcriptome for eXpress/RSEM/etc
"""
work_bam = dd.get_work_bam(data)
(base, ext) = os.path.splitext(work_bam)
out_file = base + '.transcriptome' + ext
if utils.file_exists(out_file):
data = dd.set_transcriptome_bam(data, out_file)
return data # depends on [control=['if'], data=[]]
# bwa mem needs phred+33 quality, so convert if it is Illumina
if dd.get_quality_format(data).lower() == 'illumina':
logger.info('bwa mem does not support the phred+64 quality format, converting %s and %s to phred+33.')
fastq_file = fastq.groom(fastq_file, data, in_qual='fastq-illumina')
if pair_file:
pair_file = fastq.groom(pair_file, data, in_qual='fastq-illumina') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
bwa = config_utils.get_program('bwa', data['config'])
gtf_file = dd.get_gtf_file(data)
gtf_fasta = index_transcriptome(gtf_file, ref_file, data)
args = ' '.join(_bwa_args_from_config(data['config']))
num_cores = data['config']['algorithm'].get('num_cores', 1)
samtools = config_utils.get_program('samtools', data['config'])
cmd = '{bwa} mem {args} -a -t {num_cores} {gtf_fasta} {fastq_file} {pair_file} '
with file_transaction(data, out_file) as tx_out_file:
message = 'Aligning %s and %s to the transcriptome.' % (fastq_file, pair_file)
cmd += '| ' + postalign.sam_to_sortbam_cl(data, tx_out_file, name_sort=True)
do.run(cmd.format(**locals()), message) # depends on [control=['with'], data=['tx_out_file']]
data = dd.set_transcriptome_bam(data, out_file)
return data |
def get(self, query, *parameters, **kwargs):
    """Return the first row matched by *query*, or ``None`` for no rows.

    Raises ``ValueError`` when the query matches more than one row.
    """
    rows = self.query(query, *parameters, **kwargs)
    if not rows:
        return None
    if len(rows) > 1:
        raise ValueError('Multiple rows returned for get() query')
    return rows[0]
constant[Returns the first row returned for the given query.]
variable[rows] assign[=] call[name[self].query, parameter[name[query], <ast.Starred object at 0x7da2044c1660>]]
if <ast.UnaryOp object at 0x7da2044c0c10> begin[:]
return[constant[None]] | keyword[def] identifier[get] ( identifier[self] , identifier[query] ,* identifier[parameters] ,** identifier[kwargs] ):
literal[string]
identifier[rows] = identifier[self] . identifier[query] ( identifier[query] ,* identifier[parameters] ,** identifier[kwargs] )
keyword[if] keyword[not] identifier[rows] :
keyword[return] keyword[None]
keyword[elif] identifier[len] ( identifier[rows] )> literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[else] :
keyword[return] identifier[rows] [ literal[int] ] | def get(self, query, *parameters, **kwargs):
"""Returns the first row returned for the given query."""
rows = self.query(query, *parameters, **kwargs)
if not rows:
return None # depends on [control=['if'], data=[]]
elif len(rows) > 1:
raise ValueError('Multiple rows returned for get() query') # depends on [control=['if'], data=[]]
else:
return rows[0] |
def get_hw_virt_ex_property(self, property_p):
    """Return a hardware-virtualization boolean property's value.

    in property_p of type :class:`HWVirtExPropertyType`
        Property type to query.
    return value of type bool
        Property value.
    raises :class:`OleErrorInvalidarg`
        Invalid property.
    """
    # Reject anything that is not a proper property-type enum member.
    if not isinstance(property_p, HWVirtExPropertyType):
        raise TypeError("property_p can only be an instance of type HWVirtExPropertyType")
    return self._call("getHWVirtExProperty",
                      in_p=[property_p])
constant[Returns the value of the specified hardware virtualization boolean property.
in property_p of type :class:`HWVirtExPropertyType`
Property type to query.
return value of type bool
Property value.
raises :class:`OleErrorInvalidarg`
Invalid property.
]
if <ast.UnaryOp object at 0x7da1b26ac190> begin[:]
<ast.Raise object at 0x7da1b26ad7e0>
variable[value] assign[=] call[name[self]._call, parameter[constant[getHWVirtExProperty]]]
return[name[value]] | keyword[def] identifier[get_hw_virt_ex_property] ( identifier[self] , identifier[property_p] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[property_p] , identifier[HWVirtExPropertyType] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[value] = identifier[self] . identifier[_call] ( literal[string] ,
identifier[in_p] =[ identifier[property_p] ])
keyword[return] identifier[value] | def get_hw_virt_ex_property(self, property_p):
"""Returns the value of the specified hardware virtualization boolean property.
in property_p of type :class:`HWVirtExPropertyType`
Property type to query.
return value of type bool
Property value.
raises :class:`OleErrorInvalidarg`
Invalid property.
"""
if not isinstance(property_p, HWVirtExPropertyType):
raise TypeError('property_p can only be an instance of type HWVirtExPropertyType') # depends on [control=['if'], data=[]]
value = self._call('getHWVirtExProperty', in_p=[property_p])
return value |
def change_vartype(self, vartype, energy_offset=0.0, inplace=True):
    """Convert this :class:`SampleSet` to the given vartype.

    Args:
        vartype (:class:`.Vartype`/str/set):
            Target variable type. Accepted input values:

            * :class:`.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}``
            * :class:`.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}``

        energy_offset (number, optional, defaul=0.0):
            Constant added to the 'energy' field of :attr:`SampleSet.record`.

        inplace (bool, optional, default=True):
            When True this instance is mutated and returned; otherwise a
            converted copy is returned and this instance is untouched.

    Returns:
        :obj:`.SampleSet`: SampleSet with the requested vartype. When
        `inplace` is True this is the same object.
    """
    if not inplace:
        # Convert a copy instead of mutating this instance.
        return self.copy().change_vartype(vartype, energy_offset, inplace=True)

    if energy_offset:
        self.record.energy = self.record.energy + energy_offset

    if vartype is self.vartype:
        # Already the requested type; nothing left to do.
        return self

    source = self.vartype
    if vartype is Vartype.SPIN and source is Vartype.BINARY:
        # {0, 1} -> {-1, +1}
        self.record.sample = 2 * self.record.sample - 1
    elif vartype is Vartype.BINARY and source is Vartype.SPIN:
        # {-1, +1} -> {0, 1}
        self.record.sample = (self.record.sample + 1) // 2
    else:
        raise ValueError("Cannot convert from {} to {}".format(self.vartype, vartype))
    self._vartype = vartype
    return self
constant[Return the :class:`SampleSet` with the given vartype.
Args:
vartype (:class:`.Vartype`/str/set):
Variable type to use for the new :class:`SampleSet`. Accepted input values:
* :class:`.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}``
* :class:`.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}``
energy_offset (number, optional, defaul=0.0):
Constant value applied to the 'energy' field of :attr:`SampleSet.record`.
inplace (bool, optional, default=True):
If True, the instantiated :class:`SampleSet` is updated; otherwise, a new
:class:`SampleSet` is returned.
Returns:
:obj:`.SampleSet`: SampleSet with changed vartype. If `inplace` is True, returns itself.
Examples:
This example creates a binary copy of a spin-valued :class:`SampleSet`.
>>> import dimod
...
>>> sampleset = dimod.ExactSolver().sample_ising({'a': -0.5, 'b': 1.0}, {('a', 'b'): -1})
>>> sampleset_binary = sampleset.change_vartype(dimod.BINARY, energy_offset=1.0, inplace=False)
>>> sampleset_binary.vartype is dimod.BINARY
True
>>> for datum in sampleset_binary.data(fields=['sample', 'energy', 'num_occurrences']): # doctest: +SKIP
... print(datum)
Sample(sample={'a': 0, 'b': 0}, energy=-0.5, num_occurrences=1)
Sample(sample={'a': 1, 'b': 0}, energy=0.5, num_occurrences=1)
Sample(sample={'a': 1, 'b': 1}, energy=0.5, num_occurrences=1)
Sample(sample={'a': 0, 'b': 1}, energy=3.5, num_occurrences=1)
]
if <ast.UnaryOp object at 0x7da1b07152d0> begin[:]
return[call[call[name[self].copy, parameter[]].change_vartype, parameter[name[vartype], name[energy_offset]]]]
if name[energy_offset] begin[:]
name[self].record.energy assign[=] binary_operation[name[self].record.energy + name[energy_offset]]
if compare[name[vartype] is name[self].vartype] begin[:]
return[name[self]]
if <ast.BoolOp object at 0x7da1b0716b60> begin[:]
name[self].record.sample assign[=] binary_operation[binary_operation[constant[2] * name[self].record.sample] - constant[1]]
name[self]._vartype assign[=] name[vartype]
return[name[self]] | keyword[def] identifier[change_vartype] ( identifier[self] , identifier[vartype] , identifier[energy_offset] = literal[int] , identifier[inplace] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[inplace] :
keyword[return] identifier[self] . identifier[copy] (). identifier[change_vartype] ( identifier[vartype] , identifier[energy_offset] , identifier[inplace] = keyword[True] )
keyword[if] identifier[energy_offset] :
identifier[self] . identifier[record] . identifier[energy] = identifier[self] . identifier[record] . identifier[energy] + identifier[energy_offset]
keyword[if] identifier[vartype] keyword[is] identifier[self] . identifier[vartype] :
keyword[return] identifier[self]
keyword[if] identifier[vartype] keyword[is] identifier[Vartype] . identifier[SPIN] keyword[and] identifier[self] . identifier[vartype] keyword[is] identifier[Vartype] . identifier[BINARY] :
identifier[self] . identifier[record] . identifier[sample] = literal[int] * identifier[self] . identifier[record] . identifier[sample] - literal[int]
identifier[self] . identifier[_vartype] = identifier[vartype]
keyword[elif] identifier[vartype] keyword[is] identifier[Vartype] . identifier[BINARY] keyword[and] identifier[self] . identifier[vartype] keyword[is] identifier[Vartype] . identifier[SPIN] :
identifier[self] . identifier[record] . identifier[sample] =( identifier[self] . identifier[record] . identifier[sample] + literal[int] )// literal[int]
identifier[self] . identifier[_vartype] = identifier[vartype]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[self] . identifier[vartype] , identifier[vartype] ))
keyword[return] identifier[self] | def change_vartype(self, vartype, energy_offset=0.0, inplace=True):
"""Return the :class:`SampleSet` with the given vartype.
Args:
vartype (:class:`.Vartype`/str/set):
Variable type to use for the new :class:`SampleSet`. Accepted input values:
* :class:`.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}``
* :class:`.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}``
energy_offset (number, optional, defaul=0.0):
Constant value applied to the 'energy' field of :attr:`SampleSet.record`.
inplace (bool, optional, default=True):
If True, the instantiated :class:`SampleSet` is updated; otherwise, a new
:class:`SampleSet` is returned.
Returns:
:obj:`.SampleSet`: SampleSet with changed vartype. If `inplace` is True, returns itself.
Examples:
This example creates a binary copy of a spin-valued :class:`SampleSet`.
>>> import dimod
...
>>> sampleset = dimod.ExactSolver().sample_ising({'a': -0.5, 'b': 1.0}, {('a', 'b'): -1})
>>> sampleset_binary = sampleset.change_vartype(dimod.BINARY, energy_offset=1.0, inplace=False)
>>> sampleset_binary.vartype is dimod.BINARY
True
>>> for datum in sampleset_binary.data(fields=['sample', 'energy', 'num_occurrences']): # doctest: +SKIP
... print(datum)
Sample(sample={'a': 0, 'b': 0}, energy=-0.5, num_occurrences=1)
Sample(sample={'a': 1, 'b': 0}, energy=0.5, num_occurrences=1)
Sample(sample={'a': 1, 'b': 1}, energy=0.5, num_occurrences=1)
Sample(sample={'a': 0, 'b': 1}, energy=3.5, num_occurrences=1)
"""
if not inplace:
return self.copy().change_vartype(vartype, energy_offset, inplace=True) # depends on [control=['if'], data=[]]
if energy_offset:
self.record.energy = self.record.energy + energy_offset # depends on [control=['if'], data=[]]
if vartype is self.vartype:
return self # we're done! # depends on [control=['if'], data=[]]
if vartype is Vartype.SPIN and self.vartype is Vartype.BINARY:
self.record.sample = 2 * self.record.sample - 1
self._vartype = vartype # depends on [control=['if'], data=[]]
elif vartype is Vartype.BINARY and self.vartype is Vartype.SPIN:
self.record.sample = (self.record.sample + 1) // 2
self._vartype = vartype # depends on [control=['if'], data=[]]
else:
raise ValueError('Cannot convert from {} to {}'.format(self.vartype, vartype))
return self |
def leading_whitespace_in_current_line(self):
    """ The leading whitespace in the left margin of the current line. """
    line = self.current_line
    # Everything before the first non-whitespace character.
    return line[:len(line) - len(line.lstrip())]
constant[ The leading whitespace in the left margin of the current line. ]
variable[current_line] assign[=] name[self].current_line
variable[length] assign[=] binary_operation[call[name[len], parameter[name[current_line]]] - call[name[len], parameter[call[name[current_line].lstrip, parameter[]]]]]
return[call[name[current_line]][<ast.Slice object at 0x7da1b08a7af0>]] | keyword[def] identifier[leading_whitespace_in_current_line] ( identifier[self] ):
literal[string]
identifier[current_line] = identifier[self] . identifier[current_line]
identifier[length] = identifier[len] ( identifier[current_line] )- identifier[len] ( identifier[current_line] . identifier[lstrip] ())
keyword[return] identifier[current_line] [: identifier[length] ] | def leading_whitespace_in_current_line(self):
""" The leading whitespace in the left margin of the current line. """
current_line = self.current_line
length = len(current_line) - len(current_line.lstrip())
return current_line[:length] |
async def get_movie(self, id_):
    """Retrieve movie data by ID.

    Arguments:
        id_ (:py:class:`int`): The movie's TMDb ID.

    Returns:
        :py:class:`~.Movie`: The requested movie, or ``None`` when the
        API returns no data.
    """
    # Ask for credits in the same request to avoid a second round trip.
    url = self.url_builder(
        'movie/{movie_id}',
        dict(movie_id=id_),
        url_params=OrderedDict(append_to_response='credits'),
    )
    payload = await self.get_data(url)
    if payload is not None:
        return Movie.from_json(payload, self.config['data'].get('images'))
literal[string]
identifier[url] = identifier[self] . identifier[url_builder] (
literal[string] ,
identifier[dict] ( identifier[movie_id] = identifier[id_] ),
identifier[url_params] = identifier[OrderedDict] ( identifier[append_to_response] = literal[string] ),
)
identifier[data] = keyword[await] identifier[self] . identifier[get_data] ( identifier[url] )
keyword[if] identifier[data] keyword[is] keyword[None] :
keyword[return]
keyword[return] identifier[Movie] . identifier[from_json] ( identifier[data] , identifier[self] . identifier[config] [ literal[string] ]. identifier[get] ( literal[string] )) | async def get_movie(self, id_):
"""Retrieve movie data by ID.
Arguments:
id_ (:py:class:`int`): The movie's TMDb ID.
Returns:
:py:class:`~.Movie`: The requested movie.
"""
url = self.url_builder('movie/{movie_id}', dict(movie_id=id_), url_params=OrderedDict(append_to_response='credits'))
data = await self.get_data(url)
if data is None:
return # depends on [control=['if'], data=[]]
return Movie.from_json(data, self.config['data'].get('images')) |
def getConfigRoot(cls, create = False):
    """
    Return the mapped configuration root node.

    Resolves this class's ``configkey`` attribute through the
    configuration manager.

    :param create: when True, ask the manager to create the tree node.
    :return: the configuration tree node, or None when the class does not
        declare a ``configkey`` attribute.
    """
    # Guard only the attribute lookup: the previous version wrapped the
    # manager.gettree() call too, so an AttributeError raised inside
    # gettree() was silently swallowed and masked real bugs.
    try:
        configkey = cls.configkey
    except AttributeError:
        return None
    return manager.gettree(configkey, create)
constant[
Return the mapped configuration root node
]
<ast.Try object at 0x7da20c6a9a20> | keyword[def] identifier[getConfigRoot] ( identifier[cls] , identifier[create] = keyword[False] ):
literal[string]
keyword[try] :
keyword[return] identifier[manager] . identifier[gettree] ( identifier[getattr] ( identifier[cls] , literal[string] ), identifier[create] )
keyword[except] identifier[AttributeError] :
keyword[return] keyword[None] | def getConfigRoot(cls, create=False):
"""
Return the mapped configuration root node
"""
try:
return manager.gettree(getattr(cls, 'configkey'), create) # depends on [control=['try'], data=[]]
except AttributeError:
return None # depends on [control=['except'], data=[]] |
def _parse_oracle(lines):
    """
    Performs the actual file parsing, returning a dict of the config values
    in a given Oracle DB config file.
    Despite their differences, the two filetypes are similar enough to
    allow idential parsing.
    """
    config = {}
    # Characters trimmed from keys and values: whitespace plus quoting.
    strip_chars = whitespace + '"\''
    for line in get_active_lines(lines):
        # A NUL byte signals control characters that must be scrubbed.
        if '\00' in line:
            line = cleanup.sub('', line)
        if '=' not in line:
            continue
        key, value = line.split('=', 1)
        key = key.strip(strip_chars).lower()
        if ',' in line:
            config[key] = [item.strip(strip_chars).lower()
                           for item in value.split(',')]
        else:
            config[key] = value.strip(strip_chars).lower()
    return config
constant[
Performs the actual file parsing, returning a dict of the config values
in a given Oracle DB config file.
Despite their differences, the two filetypes are similar enough to
allow idential parsing.
]
variable[config] assign[=] dictionary[[], []]
for taget[name[line]] in starred[call[name[get_active_lines], parameter[name[lines]]]] begin[:]
if compare[constant[ ] in name[line]] begin[:]
variable[line] assign[=] call[name[cleanup].sub, parameter[constant[], name[line]]]
if compare[constant[=] in name[line]] begin[:]
<ast.Tuple object at 0x7da18f811240> assign[=] call[name[line].split, parameter[constant[=], constant[1]]]
variable[key] assign[=] call[call[name[key].strip, parameter[binary_operation[name[whitespace] + constant["']]]].lower, parameter[]]
if compare[constant[,] in name[line]] begin[:]
variable[value] assign[=] <ast.ListComp object at 0x7da18f813d90>
call[name[config]][name[key]] assign[=] name[value]
return[name[config]] | keyword[def] identifier[_parse_oracle] ( identifier[lines] ):
literal[string]
identifier[config] ={}
keyword[for] identifier[line] keyword[in] identifier[get_active_lines] ( identifier[lines] ):
keyword[if] literal[string] keyword[in] identifier[line] :
identifier[line] = identifier[cleanup] . identifier[sub] ( literal[string] , identifier[line] )
keyword[if] literal[string] keyword[in] identifier[line] :
( identifier[key] , identifier[value] )= identifier[line] . identifier[split] ( literal[string] , literal[int] )
identifier[key] = identifier[key] . identifier[strip] ( identifier[whitespace] + literal[string] ). identifier[lower] ()
keyword[if] literal[string] keyword[in] identifier[line] :
identifier[value] =[ identifier[s] . identifier[strip] ( identifier[whitespace] + literal[string] ). identifier[lower] () keyword[for] identifier[s] keyword[in] identifier[value] . identifier[split] ( literal[string] )]
keyword[else] :
identifier[value] = identifier[value] . identifier[strip] ( identifier[whitespace] + literal[string] ). identifier[lower] ()
identifier[config] [ identifier[key] ]= identifier[value]
keyword[return] identifier[config] | def _parse_oracle(lines):
"""
Performs the actual file parsing, returning a dict of the config values
in a given Oracle DB config file.
Despite their differences, the two filetypes are similar enough to
allow idential parsing.
"""
config = {}
for line in get_active_lines(lines):
# Check for NULL in line to begin control char removal
if '\x00' in line:
line = cleanup.sub('', line) # depends on [control=['if'], data=['line']]
if '=' in line:
(key, value) = line.split('=', 1)
key = key.strip(whitespace + '"\'').lower()
if ',' in line:
value = [s.strip(whitespace + '"\'').lower() for s in value.split(',')] # depends on [control=['if'], data=[]]
else:
value = value.strip(whitespace + '"\'').lower()
config[key] = value # depends on [control=['if'], data=['line']] # depends on [control=['for'], data=['line']]
return config |
def createSharedLibBuilder(env):
    """Return the SharedLibrary Builder for *env*, creating it on demand.

    If the Environment already carries a SharedLibrary Builder, that
    existing instance is returned unchanged; otherwise a new Builder is
    constructed, registered under ``env['BUILDERS']``, and returned.
    """
    builders = env['BUILDERS']
    try:
        return builders['SharedLibrary']
    except KeyError:
        pass

    import SCons.Defaults
    # Action chain: sanity-check the shared objects, run the shared-link
    # step, then apply the library-symlink post-processing.
    new_builder = SCons.Builder.Builder(
        action=[SCons.Defaults.SharedCheck,
                SCons.Defaults.ShLinkAction,
                LibSymlinksAction],
        emitter="$SHLIBEMITTER",
        prefix=ShLibPrefixGenerator,
        suffix=ShLibSuffixGenerator,
        target_scanner=ProgramScanner,
        src_suffix='$SHOBJSUFFIX',
        src_builder='SharedObject')
    builders['SharedLibrary'] = new_builder
    return new_builder
constant[This is a utility function that creates the SharedLibrary
Builder in an Environment if it is not there already.
If it is already there, we return the existing one.
]
<ast.Try object at 0x7da18f09f6a0>
return[name[shared_lib]] | keyword[def] identifier[createSharedLibBuilder] ( identifier[env] ):
literal[string]
keyword[try] :
identifier[shared_lib] = identifier[env] [ literal[string] ][ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[import] identifier[SCons] . identifier[Defaults]
identifier[action_list] =[ identifier[SCons] . identifier[Defaults] . identifier[SharedCheck] ,
identifier[SCons] . identifier[Defaults] . identifier[ShLinkAction] ,
identifier[LibSymlinksAction] ]
identifier[shared_lib] = identifier[SCons] . identifier[Builder] . identifier[Builder] ( identifier[action] = identifier[action_list] ,
identifier[emitter] = literal[string] ,
identifier[prefix] = identifier[ShLibPrefixGenerator] ,
identifier[suffix] = identifier[ShLibSuffixGenerator] ,
identifier[target_scanner] = identifier[ProgramScanner] ,
identifier[src_suffix] = literal[string] ,
identifier[src_builder] = literal[string] )
identifier[env] [ literal[string] ][ literal[string] ]= identifier[shared_lib]
keyword[return] identifier[shared_lib] | def createSharedLibBuilder(env):
"""This is a utility function that creates the SharedLibrary
Builder in an Environment if it is not there already.
If it is already there, we return the existing one.
"""
try:
shared_lib = env['BUILDERS']['SharedLibrary'] # depends on [control=['try'], data=[]]
except KeyError:
import SCons.Defaults
action_list = [SCons.Defaults.SharedCheck, SCons.Defaults.ShLinkAction, LibSymlinksAction]
shared_lib = SCons.Builder.Builder(action=action_list, emitter='$SHLIBEMITTER', prefix=ShLibPrefixGenerator, suffix=ShLibSuffixGenerator, target_scanner=ProgramScanner, src_suffix='$SHOBJSUFFIX', src_builder='SharedObject')
env['BUILDERS']['SharedLibrary'] = shared_lib # depends on [control=['except'], data=[]]
return shared_lib |
def _harvest_validate(self, userkwargs):
    """Validate and Plant user provided arguments

    - Collapse aggregate-prefixed keys ("avg_total") onto their base key.
    - Resolve argument aliases and defaults against ``self.arguments``.
    - Build a valideer schema (``parser``) from the matched seeds and
      validate/adapt the user-supplied values.
    - Extract comparison/negation operators ("!", "<", ...) from values
      into a separate ``operators`` mapping.

    Returns a tuple ``(operators, validated)`` — see the worked example
    at the bottom of this method.

    NOTE(review): this method mutates ``userkwargs`` in place while
    iterating; statement order is load-bearing throughout.
    """
    # the valideer schema (property-name -> validator spec) built up while
    # processing each argument; parsed into a real validator at the end
    parser = {}
    # network kwargs always participate in validation
    userkwargs.update(self.network_kwargs)
    # a simple set of original provided argument keys (used in IGNORES);
    # "agg_base" keys contribute their "base" part
    original_kwargs = set(map(lambda k: k.split('_')[1] if k.find('_')>-1 else k, userkwargs.keys()))
    # list of columns that are required from seeds
    requires = []
    # -------------
    # Clean up Aggs
    # -------------
    # Fold "agg_base" keys into the base key as (agg, value) tuples so a
    # base argument can carry several aggregates at once.
    for key in userkwargs.keys():
        # agg example: "avg_total", "max_tax"
        if key.find('_') > 0:
            agg, base = tuple(key.split('_'))
            if base in userkwargs:
                # base already present: normalize it to a list of
                # (agg, value) tuples, tagging the plain value with None
                if type(userkwargs[base]) is not list:
                    userkwargs[base] = [(None, userkwargs[base])]
                userkwargs[base].append( (agg, userkwargs.pop(key)) )
            else:
                userkwargs[base] = [(agg, userkwargs.pop(key))]
    # -----------------
    # Process Arguments
    # -----------------
    for key, seed in self.arguments.iteritems():
        # --------------
        # Argument Alias
        # --------------
        # A seed may declare an alias pointing at another argument; the
        # user's value is re-keyed onto the alias target.
        if seed.get('alias') and key in userkwargs:
            # pop the value form the user kwargs (to change the key later)
            value = userkwargs.pop(key) if key in userkwargs else NotImplemented
            # keep the original key for the duplicate-alias error message
            oldkey = key+""
            # change the key
            key = seed.get('alias')
            # change the seed to the alias target's seed
            seed = get(self.arguments, seed.get('alias'))
            # set the new key:value
            if value is not NotImplemented:
                # reject when the user supplied both the alias and target
                if key in userkwargs:
                    raise valideer.ValidationError("Argument alias already specified for `%s` via `%s`" % (oldkey, key), oldkey)
                userkwargs[key] = value
        # a "[]" suffix marks arguments that accept multiple values
        if key.endswith('[]'):
            multi = True
            key = key[:-2]
        else:
            multi = False
        # get value(s) from user; fall back to a copied argument, then to
        # the seed's declared default
        if key in userkwargs:
            value = userkwargs.pop(key)
        elif seed.get('copy'):
            value = userkwargs.get(seed.get('copy'))
        else:
            value = seed.get('default')
        # no argument provided: required seeds fail, optional ones skip
        if value is None or value == []:
            if seed.get('required'):
                raise valideer.ValidationError("missing required property: %s" % key, key)
            else:
                continue
        # record columns this seed requires (checked after processing)
        requires.extend(array(get(seed, 'requires', [])))
        # -----------
        # Inheritance
        # -----------
        # not permited from arguements yet. would need to happen above the ""PROCESS ARGUMENT"" block
        # self._inherit(*array(get(seed, 'inherit', [])))
        if type(value) is list and type(value[0]) is tuple:
            # complex: list of (agg, value) tuples produced by the
            # agg-cleanup pass above — harvest each one separately
            for v in value:
                ud, pd = self._harvest_args(key, seed, v, multi)
                userkwargs.update(ud)
                parser.update(pd)
        else:
            ud, pd = self._harvest_args(key, seed, value, multi)
            userkwargs.update(ud)
            parser.update(pd)
    # ------------
    # Ignored Keys
    # ------------
    # Seeds may blacklist argument keys; a non-silent seed raises when the
    # user originally provided any of them, otherwise they are dropped.
    for seed in self.seeds:
        ignores = set(array(get(seed, 'ignore')))
        if ignores:
            if ignores & original_kwargs:
                if not get(seed, 'silent'):
                    additionals = ignores & original_kwargs
                    raise valideer.ValidationError("additional properties: %s" % ",".join(additionals), additionals)
            [userkwargs.pop(key) for key in ignores if key in userkwargs]
    # -------------------------
    # Custom Operators (part 1)
    # -------------------------
    # Strip comparison operators off the values, remembering them per key
    # (paired with the aggregate name when one applies).
    operators = {}
    for key, value in userkwargs.items():
        rk = key
        agg = None
        if key.find('_')>-1:
            agg, rk = tuple(key.split('_'))
        # look the seed up under both the plain and multi ("[]") names
        seed = self.arguments.get(rk, self.arguments.get(rk+'[]'))
        if seed:
            if type(value) is list:
                operators[key] = []
                # need to remove the operator for validating
                new_values = []
                for v in value:
                    operator, v = self._operator(v, *seed.get('column', "").rsplit("::", 1))
                    new_values.append(v)
                    operators[key].append((agg, operator) if agg else operator)
                userkwargs[key] = new_values
            else:
                operator, value = self._operator(value, *seed.get('column', "").rsplit("::", 1))
                operators[key] = (agg, operator) if agg else operator
                userkwargs[key] = value
    # -----------------
    # Plant Sort Method
    # -----------------
    # Tag the seed chosen by "sortby" with its own id so downstream code
    # can identify the sort column.
    if 'sortby' in userkwargs:
        seed = self.arguments.get(userkwargs['sortby'].lower(), self.arguments.get(userkwargs['sortby'].lower()+'[]'))
        if seed:
            seed['id'] = str(userkwargs['sortby'].lower())
    # enforce seed-declared cross-argument requirements
    for r in set(requires):
        if userkwargs.get(r) is None:
            raise valideer.ValidationError("required property not set: %s" % r, r)
    # --------
    # Validate
    # --------
    parser = valideer.parse(parser, additional_properties=False)
    validated = parser.validate(userkwargs, adapt=self.navigator.adapter())
    validated.update(self.network_kwargs)
    # operators validated
    # --------------------------- | --------------------------------
    #  {                             {
    #    "type": ["!", "!"],           "type": ['a', 'b'],
    #    "total": "<",                 "total": "50",
    #    "tax": ("avg, ">"),           "tax": "1",
    #    "time": None                  "time": "2014"
    #  }                             }
    return operators, validated
constant[Validate and Plant user provided arguments
- Go through and plants the seedlings
for any user arguments provided.
- Validate the arguments, cleaning and adapting (valideer wise)
- Extract negatives "!" arguments
]
variable[parser] assign[=] dictionary[[], []]
call[name[userkwargs].update, parameter[name[self].network_kwargs]]
variable[original_kwargs] assign[=] call[name[set], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da2044c2fe0>, call[name[userkwargs].keys, parameter[]]]]]]
variable[requires] assign[=] list[[]]
for taget[name[key]] in starred[call[name[userkwargs].keys, parameter[]]] begin[:]
if compare[call[name[key].find, parameter[constant[_]]] greater[>] constant[0]] begin[:]
<ast.Tuple object at 0x7da2044c3a00> assign[=] call[name[tuple], parameter[call[name[key].split, parameter[constant[_]]]]]
if compare[name[base] in name[userkwargs]] begin[:]
if compare[call[name[type], parameter[call[name[userkwargs]][name[base]]]] is_not name[list]] begin[:]
call[name[userkwargs]][name[base]] assign[=] list[[<ast.Tuple object at 0x7da2044c2860>]]
call[call[name[userkwargs]][name[base]].append, parameter[tuple[[<ast.Name object at 0x7da2044c3490>, <ast.Call object at 0x7da2044c3370>]]]]
for taget[tuple[[<ast.Name object at 0x7da2044c17b0>, <ast.Name object at 0x7da2044c1ea0>]]] in starred[call[name[self].arguments.iteritems, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da2044c1cf0> begin[:]
variable[value] assign[=] <ast.IfExp object at 0x7da2044c35b0>
variable[oldkey] assign[=] binary_operation[name[key] + constant[]]
variable[key] assign[=] call[name[seed].get, parameter[constant[alias]]]
variable[seed] assign[=] call[name[get], parameter[name[self].arguments, call[name[seed].get, parameter[constant[alias]]]]]
if compare[name[value] is_not name[NotImplemented]] begin[:]
if compare[name[key] in name[userkwargs]] begin[:]
<ast.Raise object at 0x7da2044c0610>
call[name[userkwargs]][name[key]] assign[=] name[value]
if call[name[key].endswith, parameter[constant[[]]]] begin[:]
variable[multi] assign[=] constant[True]
variable[key] assign[=] call[name[key]][<ast.Slice object at 0x7da2044c0a30>]
if compare[name[key] in name[userkwargs]] begin[:]
variable[value] assign[=] call[name[userkwargs].pop, parameter[name[key]]]
if <ast.BoolOp object at 0x7da2044c1e40> begin[:]
if call[name[seed].get, parameter[constant[required]]] begin[:]
<ast.Raise object at 0x7da2044c09d0>
call[name[requires].extend, parameter[call[name[array], parameter[call[name[get], parameter[name[seed], constant[requires], list[[]]]]]]]]
if <ast.BoolOp object at 0x7da2044c0790> begin[:]
for taget[name[v]] in starred[name[value]] begin[:]
<ast.Tuple object at 0x7da2044c3f40> assign[=] call[name[self]._harvest_args, parameter[name[key], name[seed], name[v], name[multi]]]
call[name[userkwargs].update, parameter[name[ud]]]
call[name[parser].update, parameter[name[pd]]]
for taget[name[seed]] in starred[name[self].seeds] begin[:]
variable[ignores] assign[=] call[name[set], parameter[call[name[array], parameter[call[name[get], parameter[name[seed], constant[ignore]]]]]]]
if name[ignores] begin[:]
if binary_operation[name[ignores] <ast.BitAnd object at 0x7da2590d6b60> name[original_kwargs]] begin[:]
if <ast.UnaryOp object at 0x7da18f09d390> begin[:]
variable[additionals] assign[=] binary_operation[name[ignores] <ast.BitAnd object at 0x7da2590d6b60> name[original_kwargs]]
<ast.Raise object at 0x7da18f09ca60>
<ast.ListComp object at 0x7da18f09f220>
variable[operators] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da18f09d870>, <ast.Name object at 0x7da18f09ef20>]]] in starred[call[name[userkwargs].items, parameter[]]] begin[:]
variable[rk] assign[=] name[key]
variable[agg] assign[=] constant[None]
if compare[call[name[key].find, parameter[constant[_]]] greater[>] <ast.UnaryOp object at 0x7da18f09f070>] begin[:]
<ast.Tuple object at 0x7da18f09dff0> assign[=] call[name[tuple], parameter[call[name[key].split, parameter[constant[_]]]]]
variable[seed] assign[=] call[name[self].arguments.get, parameter[name[rk], call[name[self].arguments.get, parameter[binary_operation[name[rk] + constant[[]]]]]]]
if name[seed] begin[:]
if compare[call[name[type], parameter[name[value]]] is name[list]] begin[:]
call[name[operators]][name[key]] assign[=] list[[]]
variable[new_values] assign[=] list[[]]
for taget[name[v]] in starred[name[value]] begin[:]
<ast.Tuple object at 0x7da18f09c580> assign[=] call[name[self]._operator, parameter[name[v], <ast.Starred object at 0x7da18f09e5f0>]]
call[name[new_values].append, parameter[name[v]]]
call[call[name[operators]][name[key]].append, parameter[<ast.IfExp object at 0x7da18f09e080>]]
call[name[userkwargs]][name[key]] assign[=] name[new_values]
if compare[constant[sortby] in name[userkwargs]] begin[:]
variable[seed] assign[=] call[name[self].arguments.get, parameter[call[call[name[userkwargs]][constant[sortby]].lower, parameter[]], call[name[self].arguments.get, parameter[binary_operation[call[call[name[userkwargs]][constant[sortby]].lower, parameter[]] + constant[[]]]]]]]
if name[seed] begin[:]
call[name[seed]][constant[id]] assign[=] call[name[str], parameter[call[call[name[userkwargs]][constant[sortby]].lower, parameter[]]]]
for taget[name[r]] in starred[call[name[set], parameter[name[requires]]]] begin[:]
if compare[call[name[userkwargs].get, parameter[name[r]]] is constant[None]] begin[:]
<ast.Raise object at 0x7da18f09f430>
variable[parser] assign[=] call[name[valideer].parse, parameter[name[parser]]]
variable[validated] assign[=] call[name[parser].validate, parameter[name[userkwargs]]]
call[name[validated].update, parameter[name[self].network_kwargs]]
return[tuple[[<ast.Name object at 0x7da18f09d630>, <ast.Name object at 0x7da18f09c0d0>]]] | keyword[def] identifier[_harvest_validate] ( identifier[self] , identifier[userkwargs] ):
literal[string]
identifier[parser] ={}
identifier[userkwargs] . identifier[update] ( identifier[self] . identifier[network_kwargs] )
identifier[original_kwargs] = identifier[set] ( identifier[map] ( keyword[lambda] identifier[k] : identifier[k] . identifier[split] ( literal[string] )[ literal[int] ] keyword[if] identifier[k] . identifier[find] ( literal[string] )>- literal[int] keyword[else] identifier[k] , identifier[userkwargs] . identifier[keys] ()))
identifier[requires] =[]
keyword[for] identifier[key] keyword[in] identifier[userkwargs] . identifier[keys] ():
keyword[if] identifier[key] . identifier[find] ( literal[string] )> literal[int] :
identifier[agg] , identifier[base] = identifier[tuple] ( identifier[key] . identifier[split] ( literal[string] ))
keyword[if] identifier[base] keyword[in] identifier[userkwargs] :
keyword[if] identifier[type] ( identifier[userkwargs] [ identifier[base] ]) keyword[is] keyword[not] identifier[list] :
identifier[userkwargs] [ identifier[base] ]=[( keyword[None] , identifier[userkwargs] [ identifier[base] ])]
identifier[userkwargs] [ identifier[base] ]. identifier[append] (( identifier[agg] , identifier[userkwargs] . identifier[pop] ( identifier[key] )))
keyword[else] :
identifier[userkwargs] [ identifier[base] ]=[( identifier[agg] , identifier[userkwargs] . identifier[pop] ( identifier[key] ))]
keyword[for] identifier[key] , identifier[seed] keyword[in] identifier[self] . identifier[arguments] . identifier[iteritems] ():
keyword[if] identifier[seed] . identifier[get] ( literal[string] ) keyword[and] identifier[key] keyword[in] identifier[userkwargs] :
identifier[value] = identifier[userkwargs] . identifier[pop] ( identifier[key] ) keyword[if] identifier[key] keyword[in] identifier[userkwargs] keyword[else] identifier[NotImplemented]
identifier[oldkey] = identifier[key] + literal[string]
identifier[key] = identifier[seed] . identifier[get] ( literal[string] )
identifier[seed] = identifier[get] ( identifier[self] . identifier[arguments] , identifier[seed] . identifier[get] ( literal[string] ))
keyword[if] identifier[value] keyword[is] keyword[not] identifier[NotImplemented] :
keyword[if] identifier[key] keyword[in] identifier[userkwargs] :
keyword[raise] identifier[valideer] . identifier[ValidationError] ( literal[string] %( identifier[oldkey] , identifier[key] ), identifier[oldkey] )
identifier[userkwargs] [ identifier[key] ]= identifier[value]
keyword[if] identifier[key] . identifier[endswith] ( literal[string] ):
identifier[multi] = keyword[True]
identifier[key] = identifier[key] [:- literal[int] ]
keyword[else] :
identifier[multi] = keyword[False]
keyword[if] identifier[key] keyword[in] identifier[userkwargs] :
identifier[value] = identifier[userkwargs] . identifier[pop] ( identifier[key] )
keyword[elif] identifier[seed] . identifier[get] ( literal[string] ):
identifier[value] = identifier[userkwargs] . identifier[get] ( identifier[seed] . identifier[get] ( literal[string] ))
keyword[else] :
identifier[value] = identifier[seed] . identifier[get] ( literal[string] )
keyword[if] identifier[value] keyword[is] keyword[None] keyword[or] identifier[value] ==[]:
keyword[if] identifier[seed] . identifier[get] ( literal[string] ):
keyword[raise] identifier[valideer] . identifier[ValidationError] ( literal[string] % identifier[key] , identifier[key] )
keyword[else] :
keyword[continue]
identifier[requires] . identifier[extend] ( identifier[array] ( identifier[get] ( identifier[seed] , literal[string] ,[])))
keyword[if] identifier[type] ( identifier[value] ) keyword[is] identifier[list] keyword[and] identifier[type] ( identifier[value] [ literal[int] ]) keyword[is] identifier[tuple] :
keyword[for] identifier[v] keyword[in] identifier[value] :
identifier[ud] , identifier[pd] = identifier[self] . identifier[_harvest_args] ( identifier[key] , identifier[seed] , identifier[v] , identifier[multi] )
identifier[userkwargs] . identifier[update] ( identifier[ud] )
identifier[parser] . identifier[update] ( identifier[pd] )
keyword[else] :
identifier[ud] , identifier[pd] = identifier[self] . identifier[_harvest_args] ( identifier[key] , identifier[seed] , identifier[value] , identifier[multi] )
identifier[userkwargs] . identifier[update] ( identifier[ud] )
identifier[parser] . identifier[update] ( identifier[pd] )
keyword[for] identifier[seed] keyword[in] identifier[self] . identifier[seeds] :
identifier[ignores] = identifier[set] ( identifier[array] ( identifier[get] ( identifier[seed] , literal[string] )))
keyword[if] identifier[ignores] :
keyword[if] identifier[ignores] & identifier[original_kwargs] :
keyword[if] keyword[not] identifier[get] ( identifier[seed] , literal[string] ):
identifier[additionals] = identifier[ignores] & identifier[original_kwargs]
keyword[raise] identifier[valideer] . identifier[ValidationError] ( literal[string] % literal[string] . identifier[join] ( identifier[additionals] ), identifier[additionals] )
[ identifier[userkwargs] . identifier[pop] ( identifier[key] ) keyword[for] identifier[key] keyword[in] identifier[ignores] keyword[if] identifier[key] keyword[in] identifier[userkwargs] ]
identifier[operators] ={}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[userkwargs] . identifier[items] ():
identifier[rk] = identifier[key]
identifier[agg] = keyword[None]
keyword[if] identifier[key] . identifier[find] ( literal[string] )>- literal[int] :
identifier[agg] , identifier[rk] = identifier[tuple] ( identifier[key] . identifier[split] ( literal[string] ))
identifier[seed] = identifier[self] . identifier[arguments] . identifier[get] ( identifier[rk] , identifier[self] . identifier[arguments] . identifier[get] ( identifier[rk] + literal[string] ))
keyword[if] identifier[seed] :
keyword[if] identifier[type] ( identifier[value] ) keyword[is] identifier[list] :
identifier[operators] [ identifier[key] ]=[]
identifier[new_values] =[]
keyword[for] identifier[v] keyword[in] identifier[value] :
identifier[operator] , identifier[v] = identifier[self] . identifier[_operator] ( identifier[v] ,* identifier[seed] . identifier[get] ( literal[string] , literal[string] ). identifier[rsplit] ( literal[string] , literal[int] ))
identifier[new_values] . identifier[append] ( identifier[v] )
identifier[operators] [ identifier[key] ]. identifier[append] (( identifier[agg] , identifier[operator] ) keyword[if] identifier[agg] keyword[else] identifier[operator] )
identifier[userkwargs] [ identifier[key] ]= identifier[new_values]
keyword[else] :
identifier[operator] , identifier[value] = identifier[self] . identifier[_operator] ( identifier[value] ,* identifier[seed] . identifier[get] ( literal[string] , literal[string] ). identifier[rsplit] ( literal[string] , literal[int] ))
identifier[operators] [ identifier[key] ]=( identifier[agg] , identifier[operator] ) keyword[if] identifier[agg] keyword[else] identifier[operator]
identifier[userkwargs] [ identifier[key] ]= identifier[value]
keyword[if] literal[string] keyword[in] identifier[userkwargs] :
identifier[seed] = identifier[self] . identifier[arguments] . identifier[get] ( identifier[userkwargs] [ literal[string] ]. identifier[lower] (), identifier[self] . identifier[arguments] . identifier[get] ( identifier[userkwargs] [ literal[string] ]. identifier[lower] ()+ literal[string] ))
keyword[if] identifier[seed] :
identifier[seed] [ literal[string] ]= identifier[str] ( identifier[userkwargs] [ literal[string] ]. identifier[lower] ())
keyword[for] identifier[r] keyword[in] identifier[set] ( identifier[requires] ):
keyword[if] identifier[userkwargs] . identifier[get] ( identifier[r] ) keyword[is] keyword[None] :
keyword[raise] identifier[valideer] . identifier[ValidationError] ( literal[string] % identifier[r] , identifier[r] )
identifier[parser] = identifier[valideer] . identifier[parse] ( identifier[parser] , identifier[additional_properties] = keyword[False] )
identifier[validated] = identifier[parser] . identifier[validate] ( identifier[userkwargs] , identifier[adapt] = identifier[self] . identifier[navigator] . identifier[adapter] ())
identifier[validated] . identifier[update] ( identifier[self] . identifier[network_kwargs] )
keyword[return] identifier[operators] , identifier[validated] | def _harvest_validate(self, userkwargs):
"""Validate and Plant user provided arguments
- Go through and plants the seedlings
for any user arguments provided.
- Validate the arguments, cleaning and adapting (valideer wise)
- Extract negatives "!" arguments
"""
# the valideer to parse the
# user arguemnts when watering
parser = {}
userkwargs.update(self.network_kwargs)
# a simple set of original provided argument keys (used in IGNORES)
original_kwargs = set(map(lambda k: k.split('_')[1] if k.find('_') > -1 else k, userkwargs.keys()))
# list of columns that are required from seeds
requires = []
# -------------
# Clean up Aggs
# -------------
for key in userkwargs.keys():
# agg example: "avg_total", "max_tax"
if key.find('_') > 0:
(agg, base) = tuple(key.split('_'))
if base in userkwargs:
if type(userkwargs[base]) is not list:
userkwargs[base] = [(None, userkwargs[base])] # depends on [control=['if'], data=[]]
userkwargs[base].append((agg, userkwargs.pop(key))) # depends on [control=['if'], data=['base', 'userkwargs']]
else:
userkwargs[base] = [(agg, userkwargs.pop(key))] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
# -----------------
# Process Arguments
# -----------------
for (key, seed) in self.arguments.iteritems():
# --------------
# Argument Alias
# --------------
if seed.get('alias') and key in userkwargs:
# pop the value form the user kwargs (to change the key later)
value = userkwargs.pop(key) if key in userkwargs else NotImplemented
# for duplicate keys
oldkey = key + ''
# change the key
key = seed.get('alias')
# change the seed
seed = get(self.arguments, seed.get('alias'))
# set the new key:value
if value is not NotImplemented:
if key in userkwargs:
raise valideer.ValidationError('Argument alias already specified for `%s` via `%s`' % (oldkey, key), oldkey) # depends on [control=['if'], data=['key']]
userkwargs[key] = value # depends on [control=['if'], data=['value']] # depends on [control=['if'], data=[]]
# can provide multiple arguments
if key.endswith('[]'):
multi = True
key = key[:-2] # depends on [control=['if'], data=[]]
else:
multi = False
# get value(s) from user
if key in userkwargs:
value = userkwargs.pop(key) # depends on [control=['if'], data=['key', 'userkwargs']]
elif seed.get('copy'):
value = userkwargs.get(seed.get('copy')) # depends on [control=['if'], data=[]]
else:
value = seed.get('default')
# no argument provided, lets continue)
if value is None or value == []:
if seed.get('required'):
raise valideer.ValidationError('missing required property: %s' % key, key) # depends on [control=['if'], data=[]]
else:
continue # depends on [control=['if'], data=[]]
# add requires
requires.extend(array(get(seed, 'requires', [])))
# -----------
# Inheritance
# -----------
# not permited from arguements yet. would need to happen above the ""PROCESS ARGUMENT"" block
# self._inherit(*array(get(seed, 'inherit', [])))
if type(value) is list and type(value[0]) is tuple:
# complex
for v in value:
(ud, pd) = self._harvest_args(key, seed, v, multi)
userkwargs.update(ud)
parser.update(pd) # depends on [control=['for'], data=['v']] # depends on [control=['if'], data=[]]
else:
(ud, pd) = self._harvest_args(key, seed, value, multi)
userkwargs.update(ud)
parser.update(pd) # depends on [control=['for'], data=[]]
# ------------
# Ignored Keys
# ------------
for seed in self.seeds:
ignores = set(array(get(seed, 'ignore')))
if ignores:
if ignores & original_kwargs:
if not get(seed, 'silent'):
additionals = ignores & original_kwargs
raise valideer.ValidationError('additional properties: %s' % ','.join(additionals), additionals) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
[userkwargs.pop(key) for key in ignores if key in userkwargs] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['seed']]
# -------------------------
# Custom Operators (part 1)
# -------------------------
operators = {}
for (key, value) in userkwargs.items():
rk = key
agg = None
if key.find('_') > -1:
(agg, rk) = tuple(key.split('_')) # depends on [control=['if'], data=[]]
seed = self.arguments.get(rk, self.arguments.get(rk + '[]'))
if seed:
if type(value) is list:
operators[key] = []
# need to remove the operator for validating
new_values = []
for v in value:
(operator, v) = self._operator(v, *seed.get('column', '').rsplit('::', 1))
new_values.append(v)
operators[key].append((agg, operator) if agg else operator) # depends on [control=['for'], data=['v']]
userkwargs[key] = new_values # depends on [control=['if'], data=[]]
else:
(operator, value) = self._operator(value, *seed.get('column', '').rsplit('::', 1))
operators[key] = (agg, operator) if agg else operator
userkwargs[key] = value # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# -----------------
# Plant Sort Method
# -----------------
if 'sortby' in userkwargs:
seed = self.arguments.get(userkwargs['sortby'].lower(), self.arguments.get(userkwargs['sortby'].lower() + '[]'))
if seed:
seed['id'] = str(userkwargs['sortby'].lower()) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['userkwargs']]
for r in set(requires):
if userkwargs.get(r) is None:
raise valideer.ValidationError('required property not set: %s' % r, r) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']]
# --------
# Validate
# --------
parser = valideer.parse(parser, additional_properties=False)
validated = parser.validate(userkwargs, adapt=self.navigator.adapter())
validated.update(self.network_kwargs)
# operators validated
# --------------------------- | --------------------------------
# { {
# "type": ["!", "!"], "type": ['a', 'b'],
# "total": "<", "total": "50",
# "tax": ("avg, ">"), "tax": "1",
# "time": None "time": "2014"
# } }
return (operators, validated) |
def uncurry(f):
    """Convert a curried function into a function on tuples
    (of positional arguments) and dictionaries (of keyword arguments).
    """
    ensure_callable(f)

    def uncurried(args=(), kwargs=None):
        # Spread the tuple/dict back out onto the wrapped callable.
        return f(*args, **(kwargs or {}))

    # Copy only the identity metadata from the original callable.
    functools.update_wrapper(uncurried, f, ('__name__', '__module__'))
    return uncurried
constant[Convert a curried function into a function on tuples
(of positional arguments) and dictionaries (of keyword arguments).
]
call[name[ensure_callable], parameter[name[f]]]
variable[result] assign[=] <ast.Lambda object at 0x7da18f09ca00>
call[name[functools].update_wrapper, parameter[name[result], name[f], tuple[[<ast.Constant object at 0x7da18f09f670>, <ast.Constant object at 0x7da18f09cf70>]]]]
return[name[result]] | keyword[def] identifier[uncurry] ( identifier[f] ):
literal[string]
identifier[ensure_callable] ( identifier[f] )
identifier[result] = keyword[lambda] identifier[args] =(), identifier[kwargs] = keyword[None] : identifier[f] (* identifier[args] ,**( identifier[kwargs] keyword[or] {}))
identifier[functools] . identifier[update_wrapper] ( identifier[result] , identifier[f] ,( literal[string] , literal[string] ))
keyword[return] identifier[result] | def uncurry(f):
"""Convert a curried function into a function on tuples
(of positional arguments) and dictionaries (of keyword arguments).
"""
ensure_callable(f)
result = lambda args=(), kwargs=None: f(*args, **kwargs or {})
functools.update_wrapper(result, f, ('__name__', '__module__'))
return result |
def download_file_maybe_extract(url, directory, filename=None, extension=None, check_files=None):
    """ Download the file at ``url`` to ``directory``. Extract to ``directory`` if tar or zip.

    Args:
        url (str): Url of file.
        directory (str): Directory to download to.
        filename (str, optional): Name of the file to download; Otherwise, a filename is extracted
            from the url.
        extension (str, optional): Extension of the file; Otherwise, attempts to extract extension
            from the filename.
        check_files (list of str, optional): Check if these files exist, ensuring the download
            succeeded. If these files exist before the download, the download is skipped.

    Returns:
        (str): Filename of download file.

    Raises:
        ValueError: Error if one of the ``check_files`` are not found following the download.
    """
    if filename is None:
        filename = _get_filename_from_url(url)

    filepath = os.path.join(directory, filename)
    # ``check_files=None`` replaces the mutable-default anti-pattern
    # (``check_files=[]``); passing an explicit list behaves as before.
    check_files = [os.path.join(directory, f) for f in (check_files or [])]

    # Skip the download entirely when every expected file already exists.
    if len(check_files) > 0 and _check_download(*check_files):
        return filepath

    # ``exist_ok=True`` avoids the isdir/makedirs race on concurrent calls.
    os.makedirs(directory, exist_ok=True)

    logger.info('Downloading {}'.format(filename))

    # Download — Google Drive links need the cookie/confirm-token dance.
    if 'drive.google.com' in url:
        _download_file_from_drive(filepath, url)
    else:
        with tqdm(unit='B', unit_scale=True, miniters=1, desc=filename) as t:
            urllib.request.urlretrieve(url, filename=filepath, reporthook=_reporthook(t))

    _maybe_extract(compressed_filename=filepath, directory=directory, extension=extension)

    # Verify the download/extraction produced the expected files
    # (vacuously true when ``check_files`` is empty, as before).
    if not _check_download(*check_files):
        raise ValueError('[DOWNLOAD FAILED] `*check_files` not found')

    return filepath
constant[ Download the file at ``url`` to ``directory``. Extract to ``directory`` if tar or zip.
Args:
url (str): Url of file.
directory (str): Directory to download to.
filename (str, optional): Name of the file to download; Otherwise, a filename is extracted
from the url.
extension (str, optional): Extension of the file; Otherwise, attempts to extract extension
from the filename.
check_files (list of str): Check if these files exist, ensuring the download succeeded.
If these files exist before the download, the download is skipped.
Returns:
(str): Filename of download file.
Raises:
ValueError: Error if one of the ``check_files`` are not found following the download.
]
if compare[name[filename] is constant[None]] begin[:]
variable[filename] assign[=] call[name[_get_filename_from_url], parameter[name[url]]]
variable[filepath] assign[=] call[name[os].path.join, parameter[name[directory], name[filename]]]
variable[check_files] assign[=] <ast.ListComp object at 0x7da18dc075b0>
if <ast.BoolOp object at 0x7da18dc04be0> begin[:]
return[name[filepath]]
if <ast.UnaryOp object at 0x7da18dc053f0> begin[:]
call[name[os].makedirs, parameter[name[directory]]]
call[name[logger].info, parameter[call[constant[Downloading {}].format, parameter[name[filename]]]]]
if compare[constant[drive.google.com] in name[url]] begin[:]
call[name[_download_file_from_drive], parameter[name[filepath], name[url]]]
call[name[_maybe_extract], parameter[]]
if <ast.UnaryOp object at 0x7da18dc04f40> begin[:]
<ast.Raise object at 0x7da18dc057b0>
return[name[filepath]] | keyword[def] identifier[download_file_maybe_extract] ( identifier[url] , identifier[directory] , identifier[filename] = keyword[None] , identifier[extension] = keyword[None] , identifier[check_files] =[]):
literal[string]
keyword[if] identifier[filename] keyword[is] keyword[None] :
identifier[filename] = identifier[_get_filename_from_url] ( identifier[url] )
identifier[filepath] = identifier[os] . identifier[path] . identifier[join] ( identifier[directory] , identifier[filename] )
identifier[check_files] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[directory] , identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[check_files] ]
keyword[if] identifier[len] ( identifier[check_files] )> literal[int] keyword[and] identifier[_check_download] (* identifier[check_files] ):
keyword[return] identifier[filepath]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[directory] ):
identifier[os] . identifier[makedirs] ( identifier[directory] )
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[filename] ))
keyword[if] literal[string] keyword[in] identifier[url] :
identifier[_download_file_from_drive] ( identifier[filepath] , identifier[url] )
keyword[else] :
keyword[with] identifier[tqdm] ( identifier[unit] = literal[string] , identifier[unit_scale] = keyword[True] , identifier[miniters] = literal[int] , identifier[desc] = identifier[filename] ) keyword[as] identifier[t] :
identifier[urllib] . identifier[request] . identifier[urlretrieve] ( identifier[url] , identifier[filename] = identifier[filepath] , identifier[reporthook] = identifier[_reporthook] ( identifier[t] ))
identifier[_maybe_extract] ( identifier[compressed_filename] = identifier[filepath] , identifier[directory] = identifier[directory] , identifier[extension] = identifier[extension] )
keyword[if] keyword[not] identifier[_check_download] (* identifier[check_files] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[filepath] | def download_file_maybe_extract(url, directory, filename=None, extension=None, check_files=[]):
""" Download the file at ``url`` to ``directory``. Extract to ``directory`` if tar or zip.
Args:
url (str): Url of file.
directory (str): Directory to download to.
filename (str, optional): Name of the file to download; Otherwise, a filename is extracted
from the url.
extension (str, optional): Extension of the file; Otherwise, attempts to extract extension
from the filename.
check_files (list of str): Check if these files exist, ensuring the download succeeded.
If these files exist before the download, the download is skipped.
Returns:
(str): Filename of download file.
Raises:
ValueError: Error if one of the ``check_files`` are not found following the download.
"""
if filename is None:
filename = _get_filename_from_url(url) # depends on [control=['if'], data=['filename']]
filepath = os.path.join(directory, filename)
check_files = [os.path.join(directory, f) for f in check_files]
if len(check_files) > 0 and _check_download(*check_files):
return filepath # depends on [control=['if'], data=[]]
if not os.path.isdir(directory):
os.makedirs(directory) # depends on [control=['if'], data=[]]
logger.info('Downloading {}'.format(filename))
# Download
if 'drive.google.com' in url:
_download_file_from_drive(filepath, url) # depends on [control=['if'], data=['url']]
else:
with tqdm(unit='B', unit_scale=True, miniters=1, desc=filename) as t:
urllib.request.urlretrieve(url, filename=filepath, reporthook=_reporthook(t)) # depends on [control=['with'], data=['t']]
_maybe_extract(compressed_filename=filepath, directory=directory, extension=extension)
if not _check_download(*check_files):
raise ValueError('[DOWNLOAD FAILED] `*check_files` not found') # depends on [control=['if'], data=[]]
return filepath |
def ins(mnemonic):
"""Lookup instruction information.
Lookup an instruction by its mnemonic.
"""
try:
opcode = bytecode.opcode_table[mnemonic]
except KeyError:
click.secho(u'No definition found.', fg='red')
return
click.echo(u'{mnemonic} (0x{op})'.format(
mnemonic=click.style(opcode['mnemonic'], fg='green', underline=True),
op=click.style(format(opcode['op'], '02x'), fg='green')
))
if opcode.get('desc'):
click.secho('Description:', fg='yellow')
click.echo(opcode['desc'])
if opcode['can_be_wide']:
click.echo(u'This instruction can be prefixed by the WIDE opcode.')
if opcode.get('runtime'):
click.secho('Possible runtime exceptions:', fg='yellow')
for runtime_exception in opcode['runtime']:
click.echo('- {runtime_exception}'.format(
runtime_exception=click.style(runtime_exception, fg='red')
))
if opcode['operands']:
click.secho(u'Operand Format:', fg='yellow')
for operand_fmt, operand_type in opcode['operands']:
click.echo(u'- {ty} as a {fmt}'.format(
ty=click.style(operand_type.name, fg='yellow'),
fmt=click.style(operand_fmt.name, fg='yellow')
))
elif opcode['op'] in (0xAB, 0xAA, 0xC4):
# lookup[table|switch] and WIDE.
click.secho(u'\nOperand Format:', fg='yellow')
click.echo(
u'This is a special-case opcode with variable operand parsing.'
) | def function[ins, parameter[mnemonic]]:
constant[Lookup instruction information.
Lookup an instruction by its mnemonic.
]
<ast.Try object at 0x7da1b25e8af0>
call[name[click].echo, parameter[call[constant[{mnemonic} (0x{op})].format, parameter[]]]]
if call[name[opcode].get, parameter[constant[desc]]] begin[:]
call[name[click].secho, parameter[constant[Description:]]]
call[name[click].echo, parameter[call[name[opcode]][constant[desc]]]]
if call[name[opcode]][constant[can_be_wide]] begin[:]
call[name[click].echo, parameter[constant[This instruction can be prefixed by the WIDE opcode.]]]
if call[name[opcode].get, parameter[constant[runtime]]] begin[:]
call[name[click].secho, parameter[constant[Possible runtime exceptions:]]]
for taget[name[runtime_exception]] in starred[call[name[opcode]][constant[runtime]]] begin[:]
call[name[click].echo, parameter[call[constant[- {runtime_exception}].format, parameter[]]]]
if call[name[opcode]][constant[operands]] begin[:]
call[name[click].secho, parameter[constant[Operand Format:]]]
for taget[tuple[[<ast.Name object at 0x7da1b25e9870>, <ast.Name object at 0x7da1b25e97e0>]]] in starred[call[name[opcode]][constant[operands]]] begin[:]
call[name[click].echo, parameter[call[constant[- {ty} as a {fmt}].format, parameter[]]]] | keyword[def] identifier[ins] ( identifier[mnemonic] ):
literal[string]
keyword[try] :
identifier[opcode] = identifier[bytecode] . identifier[opcode_table] [ identifier[mnemonic] ]
keyword[except] identifier[KeyError] :
identifier[click] . identifier[secho] ( literal[string] , identifier[fg] = literal[string] )
keyword[return]
identifier[click] . identifier[echo] ( literal[string] . identifier[format] (
identifier[mnemonic] = identifier[click] . identifier[style] ( identifier[opcode] [ literal[string] ], identifier[fg] = literal[string] , identifier[underline] = keyword[True] ),
identifier[op] = identifier[click] . identifier[style] ( identifier[format] ( identifier[opcode] [ literal[string] ], literal[string] ), identifier[fg] = literal[string] )
))
keyword[if] identifier[opcode] . identifier[get] ( literal[string] ):
identifier[click] . identifier[secho] ( literal[string] , identifier[fg] = literal[string] )
identifier[click] . identifier[echo] ( identifier[opcode] [ literal[string] ])
keyword[if] identifier[opcode] [ literal[string] ]:
identifier[click] . identifier[echo] ( literal[string] )
keyword[if] identifier[opcode] . identifier[get] ( literal[string] ):
identifier[click] . identifier[secho] ( literal[string] , identifier[fg] = literal[string] )
keyword[for] identifier[runtime_exception] keyword[in] identifier[opcode] [ literal[string] ]:
identifier[click] . identifier[echo] ( literal[string] . identifier[format] (
identifier[runtime_exception] = identifier[click] . identifier[style] ( identifier[runtime_exception] , identifier[fg] = literal[string] )
))
keyword[if] identifier[opcode] [ literal[string] ]:
identifier[click] . identifier[secho] ( literal[string] , identifier[fg] = literal[string] )
keyword[for] identifier[operand_fmt] , identifier[operand_type] keyword[in] identifier[opcode] [ literal[string] ]:
identifier[click] . identifier[echo] ( literal[string] . identifier[format] (
identifier[ty] = identifier[click] . identifier[style] ( identifier[operand_type] . identifier[name] , identifier[fg] = literal[string] ),
identifier[fmt] = identifier[click] . identifier[style] ( identifier[operand_fmt] . identifier[name] , identifier[fg] = literal[string] )
))
keyword[elif] identifier[opcode] [ literal[string] ] keyword[in] ( literal[int] , literal[int] , literal[int] ):
identifier[click] . identifier[secho] ( literal[string] , identifier[fg] = literal[string] )
identifier[click] . identifier[echo] (
literal[string]
) | def ins(mnemonic):
"""Lookup instruction information.
Lookup an instruction by its mnemonic.
"""
try:
opcode = bytecode.opcode_table[mnemonic] # depends on [control=['try'], data=[]]
except KeyError:
click.secho(u'No definition found.', fg='red')
return # depends on [control=['except'], data=[]]
click.echo(u'{mnemonic} (0x{op})'.format(mnemonic=click.style(opcode['mnemonic'], fg='green', underline=True), op=click.style(format(opcode['op'], '02x'), fg='green')))
if opcode.get('desc'):
click.secho('Description:', fg='yellow')
click.echo(opcode['desc']) # depends on [control=['if'], data=[]]
if opcode['can_be_wide']:
click.echo(u'This instruction can be prefixed by the WIDE opcode.') # depends on [control=['if'], data=[]]
if opcode.get('runtime'):
click.secho('Possible runtime exceptions:', fg='yellow')
for runtime_exception in opcode['runtime']:
click.echo('- {runtime_exception}'.format(runtime_exception=click.style(runtime_exception, fg='red'))) # depends on [control=['for'], data=['runtime_exception']] # depends on [control=['if'], data=[]]
if opcode['operands']:
click.secho(u'Operand Format:', fg='yellow')
for (operand_fmt, operand_type) in opcode['operands']:
click.echo(u'- {ty} as a {fmt}'.format(ty=click.style(operand_type.name, fg='yellow'), fmt=click.style(operand_fmt.name, fg='yellow'))) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif opcode['op'] in (171, 170, 196):
# lookup[table|switch] and WIDE.
click.secho(u'\nOperand Format:', fg='yellow')
click.echo(u'This is a special-case opcode with variable operand parsing.') # depends on [control=['if'], data=[]] |
def print_role(role, detailed=True):
"""Pretty prints the given role"""
if detailed:
print(colors.yellow(role.get('fullname')))
else:
print(" Role: {0}".format(role.get('fullname')))
if detailed:
print(" description: {0}".format(role.get('description')))
if 'default_attributes' in role:
print(" default_attributes:")
_pprint(role['default_attributes'])
if 'override_attributes' in role:
print(" override_attributes:")
_pprint(role['override_attributes'])
if detailed:
print(" run_list: {0}".format(role.get('run_list')))
print("") | def function[print_role, parameter[role, detailed]]:
constant[Pretty prints the given role]
if name[detailed] begin[:]
call[name[print], parameter[call[name[colors].yellow, parameter[call[name[role].get, parameter[constant[fullname]]]]]]]
if name[detailed] begin[:]
call[name[print], parameter[call[constant[ description: {0}].format, parameter[call[name[role].get, parameter[constant[description]]]]]]]
if compare[constant[default_attributes] in name[role]] begin[:]
call[name[print], parameter[constant[ default_attributes:]]]
call[name[_pprint], parameter[call[name[role]][constant[default_attributes]]]]
if compare[constant[override_attributes] in name[role]] begin[:]
call[name[print], parameter[constant[ override_attributes:]]]
call[name[_pprint], parameter[call[name[role]][constant[override_attributes]]]]
if name[detailed] begin[:]
call[name[print], parameter[call[constant[ run_list: {0}].format, parameter[call[name[role].get, parameter[constant[run_list]]]]]]]
call[name[print], parameter[constant[]]] | keyword[def] identifier[print_role] ( identifier[role] , identifier[detailed] = keyword[True] ):
literal[string]
keyword[if] identifier[detailed] :
identifier[print] ( identifier[colors] . identifier[yellow] ( identifier[role] . identifier[get] ( literal[string] )))
keyword[else] :
identifier[print] ( literal[string] . identifier[format] ( identifier[role] . identifier[get] ( literal[string] )))
keyword[if] identifier[detailed] :
identifier[print] ( literal[string] . identifier[format] ( identifier[role] . identifier[get] ( literal[string] )))
keyword[if] literal[string] keyword[in] identifier[role] :
identifier[print] ( literal[string] )
identifier[_pprint] ( identifier[role] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[role] :
identifier[print] ( literal[string] )
identifier[_pprint] ( identifier[role] [ literal[string] ])
keyword[if] identifier[detailed] :
identifier[print] ( literal[string] . identifier[format] ( identifier[role] . identifier[get] ( literal[string] )))
identifier[print] ( literal[string] ) | def print_role(role, detailed=True):
"""Pretty prints the given role"""
if detailed:
print(colors.yellow(role.get('fullname'))) # depends on [control=['if'], data=[]]
else:
print(' Role: {0}'.format(role.get('fullname')))
if detailed:
print(' description: {0}'.format(role.get('description'))) # depends on [control=['if'], data=[]]
if 'default_attributes' in role:
print(' default_attributes:')
_pprint(role['default_attributes']) # depends on [control=['if'], data=['role']]
if 'override_attributes' in role:
print(' override_attributes:')
_pprint(role['override_attributes']) # depends on [control=['if'], data=['role']]
if detailed:
print(' run_list: {0}'.format(role.get('run_list'))) # depends on [control=['if'], data=[]]
print('') |
def get_repr(expr, multiline=False):
"""
Build a repr string for ``expr`` from its vars and signature.
::
>>> class MyObject:
... def __init__(self, arg1, arg2, *var_args, foo=None, bar=None, **kwargs):
... self.arg1 = arg1
... self.arg2 = arg2
... self.var_args = var_args
... self.foo = foo
... self.bar = bar
... self.kwargs = kwargs
...
>>> my_object = MyObject('a', 'b', 'c', 'd', foo='x', quux=['y', 'z'])
::
>>> import uqbar
>>> print(uqbar.objects.get_repr(my_object))
MyObject(
'a',
'b',
'c',
'd',
foo='x',
quux=['y', 'z'],
)
"""
signature = _get_object_signature(expr)
if signature is None:
return "{}()".format(type(expr).__name__)
defaults = {}
for name, parameter in signature.parameters.items():
if parameter.default is not inspect._empty:
defaults[name] = parameter.default
args, var_args, kwargs = get_vars(expr)
args_parts = collections.OrderedDict()
var_args_parts = []
kwargs_parts = {}
has_lines = multiline
parts = []
# Format keyword-optional arguments.
# print(type(expr), args)
for i, (key, value) in enumerate(args.items()):
arg_repr = _dispatch_formatting(value)
if "\n" in arg_repr:
has_lines = True
args_parts[key] = arg_repr
# Format *args
for arg in var_args:
arg_repr = _dispatch_formatting(arg)
if "\n" in arg_repr:
has_lines = True
var_args_parts.append(arg_repr)
# Format **kwargs
for key, value in sorted(kwargs.items()):
if key in defaults and value == defaults[key]:
continue
value = _dispatch_formatting(value)
arg_repr = "{}={}".format(key, value)
has_lines = True
kwargs_parts[key] = arg_repr
for _, part in args_parts.items():
parts.append(part)
parts.extend(var_args_parts)
for _, part in sorted(kwargs_parts.items()):
parts.append(part)
# If we should format on multiple lines, add the appropriate formatting.
if has_lines and parts:
for i, part in enumerate(parts):
parts[i] = "\n".join(" " + line for line in part.split("\n"))
parts.append(" )")
parts = ",\n".join(parts)
return "{}(\n{}".format(type(expr).__name__, parts)
parts = ", ".join(parts)
return "{}({})".format(type(expr).__name__, parts) | def function[get_repr, parameter[expr, multiline]]:
constant[
Build a repr string for ``expr`` from its vars and signature.
::
>>> class MyObject:
... def __init__(self, arg1, arg2, *var_args, foo=None, bar=None, **kwargs):
... self.arg1 = arg1
... self.arg2 = arg2
... self.var_args = var_args
... self.foo = foo
... self.bar = bar
... self.kwargs = kwargs
...
>>> my_object = MyObject('a', 'b', 'c', 'd', foo='x', quux=['y', 'z'])
::
>>> import uqbar
>>> print(uqbar.objects.get_repr(my_object))
MyObject(
'a',
'b',
'c',
'd',
foo='x',
quux=['y', 'z'],
)
]
variable[signature] assign[=] call[name[_get_object_signature], parameter[name[expr]]]
if compare[name[signature] is constant[None]] begin[:]
return[call[constant[{}()].format, parameter[call[name[type], parameter[name[expr]]].__name__]]]
variable[defaults] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da20c6aa170>, <ast.Name object at 0x7da20c6a9480>]]] in starred[call[name[signature].parameters.items, parameter[]]] begin[:]
if compare[name[parameter].default is_not name[inspect]._empty] begin[:]
call[name[defaults]][name[name]] assign[=] name[parameter].default
<ast.Tuple object at 0x7da20c6ab3d0> assign[=] call[name[get_vars], parameter[name[expr]]]
variable[args_parts] assign[=] call[name[collections].OrderedDict, parameter[]]
variable[var_args_parts] assign[=] list[[]]
variable[kwargs_parts] assign[=] dictionary[[], []]
variable[has_lines] assign[=] name[multiline]
variable[parts] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c6aa0e0>, <ast.Tuple object at 0x7da20c6aaf80>]]] in starred[call[name[enumerate], parameter[call[name[args].items, parameter[]]]]] begin[:]
variable[arg_repr] assign[=] call[name[_dispatch_formatting], parameter[name[value]]]
if compare[constant[
] in name[arg_repr]] begin[:]
variable[has_lines] assign[=] constant[True]
call[name[args_parts]][name[key]] assign[=] name[arg_repr]
for taget[name[arg]] in starred[name[var_args]] begin[:]
variable[arg_repr] assign[=] call[name[_dispatch_formatting], parameter[name[arg]]]
if compare[constant[
] in name[arg_repr]] begin[:]
variable[has_lines] assign[=] constant[True]
call[name[var_args_parts].append, parameter[name[arg_repr]]]
for taget[tuple[[<ast.Name object at 0x7da20c6a99c0>, <ast.Name object at 0x7da20c6aa500>]]] in starred[call[name[sorted], parameter[call[name[kwargs].items, parameter[]]]]] begin[:]
if <ast.BoolOp object at 0x7da20c6ab250> begin[:]
continue
variable[value] assign[=] call[name[_dispatch_formatting], parameter[name[value]]]
variable[arg_repr] assign[=] call[constant[{}={}].format, parameter[name[key], name[value]]]
variable[has_lines] assign[=] constant[True]
call[name[kwargs_parts]][name[key]] assign[=] name[arg_repr]
for taget[tuple[[<ast.Name object at 0x7da20c6ab5b0>, <ast.Name object at 0x7da20c6a8760>]]] in starred[call[name[args_parts].items, parameter[]]] begin[:]
call[name[parts].append, parameter[name[part]]]
call[name[parts].extend, parameter[name[var_args_parts]]]
for taget[tuple[[<ast.Name object at 0x7da20c6a9240>, <ast.Name object at 0x7da20c6aa470>]]] in starred[call[name[sorted], parameter[call[name[kwargs_parts].items, parameter[]]]]] begin[:]
call[name[parts].append, parameter[name[part]]]
if <ast.BoolOp object at 0x7da20c6a8100> begin[:]
for taget[tuple[[<ast.Name object at 0x7da20c6a9b40>, <ast.Name object at 0x7da20c6a9b70>]]] in starred[call[name[enumerate], parameter[name[parts]]]] begin[:]
call[name[parts]][name[i]] assign[=] call[constant[
].join, parameter[<ast.GeneratorExp object at 0x7da20c6abd30>]]
call[name[parts].append, parameter[constant[ )]]]
variable[parts] assign[=] call[constant[,
].join, parameter[name[parts]]]
return[call[constant[{}(
{}].format, parameter[call[name[type], parameter[name[expr]]].__name__, name[parts]]]]
variable[parts] assign[=] call[constant[, ].join, parameter[name[parts]]]
return[call[constant[{}({})].format, parameter[call[name[type], parameter[name[expr]]].__name__, name[parts]]]] | keyword[def] identifier[get_repr] ( identifier[expr] , identifier[multiline] = keyword[False] ):
literal[string]
identifier[signature] = identifier[_get_object_signature] ( identifier[expr] )
keyword[if] identifier[signature] keyword[is] keyword[None] :
keyword[return] literal[string] . identifier[format] ( identifier[type] ( identifier[expr] ). identifier[__name__] )
identifier[defaults] ={}
keyword[for] identifier[name] , identifier[parameter] keyword[in] identifier[signature] . identifier[parameters] . identifier[items] ():
keyword[if] identifier[parameter] . identifier[default] keyword[is] keyword[not] identifier[inspect] . identifier[_empty] :
identifier[defaults] [ identifier[name] ]= identifier[parameter] . identifier[default]
identifier[args] , identifier[var_args] , identifier[kwargs] = identifier[get_vars] ( identifier[expr] )
identifier[args_parts] = identifier[collections] . identifier[OrderedDict] ()
identifier[var_args_parts] =[]
identifier[kwargs_parts] ={}
identifier[has_lines] = identifier[multiline]
identifier[parts] =[]
keyword[for] identifier[i] ,( identifier[key] , identifier[value] ) keyword[in] identifier[enumerate] ( identifier[args] . identifier[items] ()):
identifier[arg_repr] = identifier[_dispatch_formatting] ( identifier[value] )
keyword[if] literal[string] keyword[in] identifier[arg_repr] :
identifier[has_lines] = keyword[True]
identifier[args_parts] [ identifier[key] ]= identifier[arg_repr]
keyword[for] identifier[arg] keyword[in] identifier[var_args] :
identifier[arg_repr] = identifier[_dispatch_formatting] ( identifier[arg] )
keyword[if] literal[string] keyword[in] identifier[arg_repr] :
identifier[has_lines] = keyword[True]
identifier[var_args_parts] . identifier[append] ( identifier[arg_repr] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[sorted] ( identifier[kwargs] . identifier[items] ()):
keyword[if] identifier[key] keyword[in] identifier[defaults] keyword[and] identifier[value] == identifier[defaults] [ identifier[key] ]:
keyword[continue]
identifier[value] = identifier[_dispatch_formatting] ( identifier[value] )
identifier[arg_repr] = literal[string] . identifier[format] ( identifier[key] , identifier[value] )
identifier[has_lines] = keyword[True]
identifier[kwargs_parts] [ identifier[key] ]= identifier[arg_repr]
keyword[for] identifier[_] , identifier[part] keyword[in] identifier[args_parts] . identifier[items] ():
identifier[parts] . identifier[append] ( identifier[part] )
identifier[parts] . identifier[extend] ( identifier[var_args_parts] )
keyword[for] identifier[_] , identifier[part] keyword[in] identifier[sorted] ( identifier[kwargs_parts] . identifier[items] ()):
identifier[parts] . identifier[append] ( identifier[part] )
keyword[if] identifier[has_lines] keyword[and] identifier[parts] :
keyword[for] identifier[i] , identifier[part] keyword[in] identifier[enumerate] ( identifier[parts] ):
identifier[parts] [ identifier[i] ]= literal[string] . identifier[join] ( literal[string] + identifier[line] keyword[for] identifier[line] keyword[in] identifier[part] . identifier[split] ( literal[string] ))
identifier[parts] . identifier[append] ( literal[string] )
identifier[parts] = literal[string] . identifier[join] ( identifier[parts] )
keyword[return] literal[string] . identifier[format] ( identifier[type] ( identifier[expr] ). identifier[__name__] , identifier[parts] )
identifier[parts] = literal[string] . identifier[join] ( identifier[parts] )
keyword[return] literal[string] . identifier[format] ( identifier[type] ( identifier[expr] ). identifier[__name__] , identifier[parts] ) | def get_repr(expr, multiline=False):
"""
Build a repr string for ``expr`` from its vars and signature.
::
>>> class MyObject:
... def __init__(self, arg1, arg2, *var_args, foo=None, bar=None, **kwargs):
... self.arg1 = arg1
... self.arg2 = arg2
... self.var_args = var_args
... self.foo = foo
... self.bar = bar
... self.kwargs = kwargs
...
>>> my_object = MyObject('a', 'b', 'c', 'd', foo='x', quux=['y', 'z'])
::
>>> import uqbar
>>> print(uqbar.objects.get_repr(my_object))
MyObject(
'a',
'b',
'c',
'd',
foo='x',
quux=['y', 'z'],
)
"""
signature = _get_object_signature(expr)
if signature is None:
return '{}()'.format(type(expr).__name__) # depends on [control=['if'], data=[]]
defaults = {}
for (name, parameter) in signature.parameters.items():
if parameter.default is not inspect._empty:
defaults[name] = parameter.default # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
(args, var_args, kwargs) = get_vars(expr)
args_parts = collections.OrderedDict()
var_args_parts = []
kwargs_parts = {}
has_lines = multiline
parts = []
# Format keyword-optional arguments.
# print(type(expr), args)
for (i, (key, value)) in enumerate(args.items()):
arg_repr = _dispatch_formatting(value)
if '\n' in arg_repr:
has_lines = True # depends on [control=['if'], data=[]]
args_parts[key] = arg_repr # depends on [control=['for'], data=[]]
# Format *args
for arg in var_args:
arg_repr = _dispatch_formatting(arg)
if '\n' in arg_repr:
has_lines = True # depends on [control=['if'], data=[]]
var_args_parts.append(arg_repr) # depends on [control=['for'], data=['arg']]
# Format **kwargs
for (key, value) in sorted(kwargs.items()):
if key in defaults and value == defaults[key]:
continue # depends on [control=['if'], data=[]]
value = _dispatch_formatting(value)
arg_repr = '{}={}'.format(key, value)
has_lines = True
kwargs_parts[key] = arg_repr # depends on [control=['for'], data=[]]
for (_, part) in args_parts.items():
parts.append(part) # depends on [control=['for'], data=[]]
parts.extend(var_args_parts)
for (_, part) in sorted(kwargs_parts.items()):
parts.append(part) # depends on [control=['for'], data=[]]
# If we should format on multiple lines, add the appropriate formatting.
if has_lines and parts:
for (i, part) in enumerate(parts):
parts[i] = '\n'.join((' ' + line for line in part.split('\n'))) # depends on [control=['for'], data=[]]
parts.append(' )')
parts = ',\n'.join(parts)
return '{}(\n{}'.format(type(expr).__name__, parts) # depends on [control=['if'], data=[]]
parts = ', '.join(parts)
return '{}({})'.format(type(expr).__name__, parts) |
def remove(self, flag, extra):
"""Remove Slackware binary packages
"""
self.flag = flag
self.extra = extra
self.dep_path = self.meta.log_path + "dep/"
dependencies, rmv_list = [], []
self.removed = self._view_removed()
if not self.removed:
print("") # new line at end
else:
msg = "package"
if len(self.removed) > 1:
msg = msg + "s"
try:
if self.meta.default_answer in ["y", "Y"]:
remove_pkg = self.meta.default_answer
else:
remove_pkg = raw_input(
"\nAre you sure to remove {0} {1} [y/N]? ".format(
str(len(self.removed)), msg))
except EOFError:
print("") # new line at exit
raise SystemExit()
if remove_pkg in ["y", "Y"]:
self._check_if_used(self.binary)
for rmv in self.removed:
# If package build and install with "slpkg -s sbo <package>"
# then look log file for dependencies in /var/log/slpkg/dep,
# read and remove all else remove only the package.
if (os.path.isfile(self.dep_path + rmv) and
self.meta.del_deps in ["on", "ON"] or
os.path.isfile(self.dep_path + rmv) and
"--deps" in self.extra):
dependencies = self._view_deps(self.dep_path, rmv)
if dependencies and self._rmv_deps_answer() in ["y",
"Y"]:
rmv_list += self._rmv_deps(dependencies, rmv)
else:
rmv_list += self._rmv_pkg(rmv)
else:
rmv_list += self._rmv_pkg(rmv)
# Prints all removed packages
self._reference_rmvs(rmv_list) | def function[remove, parameter[self, flag, extra]]:
constant[Remove Slackware binary packages
]
name[self].flag assign[=] name[flag]
name[self].extra assign[=] name[extra]
name[self].dep_path assign[=] binary_operation[name[self].meta.log_path + constant[dep/]]
<ast.Tuple object at 0x7da18f7217e0> assign[=] tuple[[<ast.List object at 0x7da18f721f60>, <ast.List object at 0x7da18f7213c0>]]
name[self].removed assign[=] call[name[self]._view_removed, parameter[]]
if <ast.UnaryOp object at 0x7da18f722770> begin[:]
call[name[print], parameter[constant[]]] | keyword[def] identifier[remove] ( identifier[self] , identifier[flag] , identifier[extra] ):
literal[string]
identifier[self] . identifier[flag] = identifier[flag]
identifier[self] . identifier[extra] = identifier[extra]
identifier[self] . identifier[dep_path] = identifier[self] . identifier[meta] . identifier[log_path] + literal[string]
identifier[dependencies] , identifier[rmv_list] =[],[]
identifier[self] . identifier[removed] = identifier[self] . identifier[_view_removed] ()
keyword[if] keyword[not] identifier[self] . identifier[removed] :
identifier[print] ( literal[string] )
keyword[else] :
identifier[msg] = literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[removed] )> literal[int] :
identifier[msg] = identifier[msg] + literal[string]
keyword[try] :
keyword[if] identifier[self] . identifier[meta] . identifier[default_answer] keyword[in] [ literal[string] , literal[string] ]:
identifier[remove_pkg] = identifier[self] . identifier[meta] . identifier[default_answer]
keyword[else] :
identifier[remove_pkg] = identifier[raw_input] (
literal[string] . identifier[format] (
identifier[str] ( identifier[len] ( identifier[self] . identifier[removed] )), identifier[msg] ))
keyword[except] identifier[EOFError] :
identifier[print] ( literal[string] )
keyword[raise] identifier[SystemExit] ()
keyword[if] identifier[remove_pkg] keyword[in] [ literal[string] , literal[string] ]:
identifier[self] . identifier[_check_if_used] ( identifier[self] . identifier[binary] )
keyword[for] identifier[rmv] keyword[in] identifier[self] . identifier[removed] :
keyword[if] ( identifier[os] . identifier[path] . identifier[isfile] ( identifier[self] . identifier[dep_path] + identifier[rmv] ) keyword[and]
identifier[self] . identifier[meta] . identifier[del_deps] keyword[in] [ literal[string] , literal[string] ] keyword[or]
identifier[os] . identifier[path] . identifier[isfile] ( identifier[self] . identifier[dep_path] + identifier[rmv] ) keyword[and]
literal[string] keyword[in] identifier[self] . identifier[extra] ):
identifier[dependencies] = identifier[self] . identifier[_view_deps] ( identifier[self] . identifier[dep_path] , identifier[rmv] )
keyword[if] identifier[dependencies] keyword[and] identifier[self] . identifier[_rmv_deps_answer] () keyword[in] [ literal[string] ,
literal[string] ]:
identifier[rmv_list] += identifier[self] . identifier[_rmv_deps] ( identifier[dependencies] , identifier[rmv] )
keyword[else] :
identifier[rmv_list] += identifier[self] . identifier[_rmv_pkg] ( identifier[rmv] )
keyword[else] :
identifier[rmv_list] += identifier[self] . identifier[_rmv_pkg] ( identifier[rmv] )
identifier[self] . identifier[_reference_rmvs] ( identifier[rmv_list] ) | def remove(self, flag, extra):
"""Remove Slackware binary packages
"""
self.flag = flag
self.extra = extra
self.dep_path = self.meta.log_path + 'dep/'
(dependencies, rmv_list) = ([], [])
self.removed = self._view_removed()
if not self.removed:
print('') # new line at end # depends on [control=['if'], data=[]]
else:
msg = 'package'
if len(self.removed) > 1:
msg = msg + 's' # depends on [control=['if'], data=[]]
try:
if self.meta.default_answer in ['y', 'Y']:
remove_pkg = self.meta.default_answer # depends on [control=['if'], data=[]]
else:
remove_pkg = raw_input('\nAre you sure to remove {0} {1} [y/N]? '.format(str(len(self.removed)), msg)) # depends on [control=['try'], data=[]]
except EOFError:
print('') # new line at exit
raise SystemExit() # depends on [control=['except'], data=[]]
if remove_pkg in ['y', 'Y']:
self._check_if_used(self.binary)
for rmv in self.removed:
# If package build and install with "slpkg -s sbo <package>"
# then look log file for dependencies in /var/log/slpkg/dep,
# read and remove all else remove only the package.
if os.path.isfile(self.dep_path + rmv) and self.meta.del_deps in ['on', 'ON'] or (os.path.isfile(self.dep_path + rmv) and '--deps' in self.extra):
dependencies = self._view_deps(self.dep_path, rmv)
if dependencies and self._rmv_deps_answer() in ['y', 'Y']:
rmv_list += self._rmv_deps(dependencies, rmv) # depends on [control=['if'], data=[]]
else:
rmv_list += self._rmv_pkg(rmv) # depends on [control=['if'], data=[]]
else:
rmv_list += self._rmv_pkg(rmv) # depends on [control=['for'], data=['rmv']]
# Prints all removed packages
self._reference_rmvs(rmv_list) # depends on [control=['if'], data=[]] |
def wipe_table(self, table: str) -> int:
"""Delete all records from a table. Use caution!"""
sql = "DELETE FROM " + self.delimit(table)
return self.db_exec(sql) | def function[wipe_table, parameter[self, table]]:
constant[Delete all records from a table. Use caution!]
variable[sql] assign[=] binary_operation[constant[DELETE FROM ] + call[name[self].delimit, parameter[name[table]]]]
return[call[name[self].db_exec, parameter[name[sql]]]] | keyword[def] identifier[wipe_table] ( identifier[self] , identifier[table] : identifier[str] )-> identifier[int] :
literal[string]
identifier[sql] = literal[string] + identifier[self] . identifier[delimit] ( identifier[table] )
keyword[return] identifier[self] . identifier[db_exec] ( identifier[sql] ) | def wipe_table(self, table: str) -> int:
"""Delete all records from a table. Use caution!"""
sql = 'DELETE FROM ' + self.delimit(table)
return self.db_exec(sql) |
def realtimeBar(self, reqId, time, open, high, low, close, volume, wap, count):
"""realtimeBar(EWrapper self, TickerId reqId, long time, double open, double high, double low, double close, long volume, double wap, int count)"""
return _swigibpy.EWrapper_realtimeBar(self, reqId, time, open, high, low, close, volume, wap, count) | def function[realtimeBar, parameter[self, reqId, time, open, high, low, close, volume, wap, count]]:
constant[realtimeBar(EWrapper self, TickerId reqId, long time, double open, double high, double low, double close, long volume, double wap, int count)]
return[call[name[_swigibpy].EWrapper_realtimeBar, parameter[name[self], name[reqId], name[time], name[open], name[high], name[low], name[close], name[volume], name[wap], name[count]]]] | keyword[def] identifier[realtimeBar] ( identifier[self] , identifier[reqId] , identifier[time] , identifier[open] , identifier[high] , identifier[low] , identifier[close] , identifier[volume] , identifier[wap] , identifier[count] ):
literal[string]
keyword[return] identifier[_swigibpy] . identifier[EWrapper_realtimeBar] ( identifier[self] , identifier[reqId] , identifier[time] , identifier[open] , identifier[high] , identifier[low] , identifier[close] , identifier[volume] , identifier[wap] , identifier[count] ) | def realtimeBar(self, reqId, time, open, high, low, close, volume, wap, count):
"""realtimeBar(EWrapper self, TickerId reqId, long time, double open, double high, double low, double close, long volume, double wap, int count)"""
return _swigibpy.EWrapper_realtimeBar(self, reqId, time, open, high, low, close, volume, wap, count) |
def as_array(
self,
include_missing=False,
weighted=True,
include_transforms_for_dims=None,
prune=False,
):
"""Return `ndarray` representing cube values.
Returns the tabular representation of the crunch cube. The returned
array has the same number of dimensions as the cube. E.g. for
a cross-tab representation of a categorical and numerical variable,
the resulting cube will have two dimensions.
*include_missing* (bool): Include rows/cols for missing values.
Example 1 (Categorical x Categorical)::
>>> cube = CrunchCube(response)
>>> cube.as_array()
np.array([
[5, 2],
[5, 3],
])
Example 2 (Categorical x Categorical, include missing values)::
>>> cube = CrunchCube(response)
>>> cube.as_array(include_missing=True)
np.array([
[5, 3, 2, 0],
[5, 2, 3, 0],
[0, 0, 0, 0],
])
"""
array = self._as_array(
include_missing=include_missing,
weighted=weighted,
include_transforms_for_dims=include_transforms_for_dims,
)
# ---prune array if pruning was requested---
if prune:
array = self._prune_body(array, transforms=include_transforms_for_dims)
return self._drop_mr_cat_dims(array) | def function[as_array, parameter[self, include_missing, weighted, include_transforms_for_dims, prune]]:
constant[Return `ndarray` representing cube values.
Returns the tabular representation of the crunch cube. The returned
array has the same number of dimensions as the cube. E.g. for
a cross-tab representation of a categorical and numerical variable,
the resulting cube will have two dimensions.
*include_missing* (bool): Include rows/cols for missing values.
Example 1 (Categorical x Categorical)::
>>> cube = CrunchCube(response)
>>> cube.as_array()
np.array([
[5, 2],
[5, 3],
])
Example 2 (Categorical x Categorical, include missing values)::
>>> cube = CrunchCube(response)
>>> cube.as_array(include_missing=True)
np.array([
[5, 3, 2, 0],
[5, 2, 3, 0],
[0, 0, 0, 0],
])
]
variable[array] assign[=] call[name[self]._as_array, parameter[]]
if name[prune] begin[:]
variable[array] assign[=] call[name[self]._prune_body, parameter[name[array]]]
return[call[name[self]._drop_mr_cat_dims, parameter[name[array]]]] | keyword[def] identifier[as_array] (
identifier[self] ,
identifier[include_missing] = keyword[False] ,
identifier[weighted] = keyword[True] ,
identifier[include_transforms_for_dims] = keyword[None] ,
identifier[prune] = keyword[False] ,
):
literal[string]
identifier[array] = identifier[self] . identifier[_as_array] (
identifier[include_missing] = identifier[include_missing] ,
identifier[weighted] = identifier[weighted] ,
identifier[include_transforms_for_dims] = identifier[include_transforms_for_dims] ,
)
keyword[if] identifier[prune] :
identifier[array] = identifier[self] . identifier[_prune_body] ( identifier[array] , identifier[transforms] = identifier[include_transforms_for_dims] )
keyword[return] identifier[self] . identifier[_drop_mr_cat_dims] ( identifier[array] ) | def as_array(self, include_missing=False, weighted=True, include_transforms_for_dims=None, prune=False):
"""Return `ndarray` representing cube values.
Returns the tabular representation of the crunch cube. The returned
array has the same number of dimensions as the cube. E.g. for
a cross-tab representation of a categorical and numerical variable,
the resulting cube will have two dimensions.
*include_missing* (bool): Include rows/cols for missing values.
Example 1 (Categorical x Categorical)::
>>> cube = CrunchCube(response)
>>> cube.as_array()
np.array([
[5, 2],
[5, 3],
])
Example 2 (Categorical x Categorical, include missing values)::
>>> cube = CrunchCube(response)
>>> cube.as_array(include_missing=True)
np.array([
[5, 3, 2, 0],
[5, 2, 3, 0],
[0, 0, 0, 0],
])
"""
array = self._as_array(include_missing=include_missing, weighted=weighted, include_transforms_for_dims=include_transforms_for_dims)
# ---prune array if pruning was requested---
if prune:
array = self._prune_body(array, transforms=include_transforms_for_dims) # depends on [control=['if'], data=[]]
return self._drop_mr_cat_dims(array) |
def p_global_var(p):
'''global_var : VARIABLE
| DOLLAR variable
| DOLLAR LBRACE expr RBRACE'''
if len(p) == 2:
p[0] = ast.Variable(p[1], lineno=p.lineno(1))
elif len(p) == 3:
p[0] = ast.Variable(p[2], lineno=p.lineno(1))
else:
p[0] = ast.Variable(p[3], lineno=p.lineno(1)) | def function[p_global_var, parameter[p]]:
constant[global_var : VARIABLE
| DOLLAR variable
| DOLLAR LBRACE expr RBRACE]
if compare[call[name[len], parameter[name[p]]] equal[==] constant[2]] begin[:]
call[name[p]][constant[0]] assign[=] call[name[ast].Variable, parameter[call[name[p]][constant[1]]]] | keyword[def] identifier[p_global_var] ( identifier[p] ):
literal[string]
keyword[if] identifier[len] ( identifier[p] )== literal[int] :
identifier[p] [ literal[int] ]= identifier[ast] . identifier[Variable] ( identifier[p] [ literal[int] ], identifier[lineno] = identifier[p] . identifier[lineno] ( literal[int] ))
keyword[elif] identifier[len] ( identifier[p] )== literal[int] :
identifier[p] [ literal[int] ]= identifier[ast] . identifier[Variable] ( identifier[p] [ literal[int] ], identifier[lineno] = identifier[p] . identifier[lineno] ( literal[int] ))
keyword[else] :
identifier[p] [ literal[int] ]= identifier[ast] . identifier[Variable] ( identifier[p] [ literal[int] ], identifier[lineno] = identifier[p] . identifier[lineno] ( literal[int] )) | def p_global_var(p):
"""global_var : VARIABLE
| DOLLAR variable
| DOLLAR LBRACE expr RBRACE"""
if len(p) == 2:
p[0] = ast.Variable(p[1], lineno=p.lineno(1)) # depends on [control=['if'], data=[]]
elif len(p) == 3:
p[0] = ast.Variable(p[2], lineno=p.lineno(1)) # depends on [control=['if'], data=[]]
else:
p[0] = ast.Variable(p[3], lineno=p.lineno(1)) |
def all(self, audience=None, page=None, per_page=None, include_totals=False, client_id=None):
"""Retrieves all client grants.
Args:
audience (str, optional): URL encoded audience of a Resource Server
to filter
page (int, optional): The result's page number (zero based).
per_page (int, optional): The amount of entries per page.
include_totals (bool, optional): True if the query summary is
to be included in the result, False otherwise.
client_id (string, optional): The id of a client to filter
See: https://auth0.com/docs/api/management/v2#!/Client_Grants/get_client_grants
"""
params = {
'audience': audience,
'page': page,
'per_page': per_page,
'include_totals': str(include_totals).lower(),
'client_id': client_id,
}
return self.client.get(self._url(), params=params) | def function[all, parameter[self, audience, page, per_page, include_totals, client_id]]:
constant[Retrieves all client grants.
Args:
audience (str, optional): URL encoded audience of a Resource Server
to filter
page (int, optional): The result's page number (zero based).
per_page (int, optional): The amount of entries per page.
include_totals (bool, optional): True if the query summary is
to be included in the result, False otherwise.
client_id (string, optional): The id of a client to filter
See: https://auth0.com/docs/api/management/v2#!/Client_Grants/get_client_grants
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b078a380>, <ast.Constant object at 0x7da1b0789e10>, <ast.Constant object at 0x7da1b0788250>, <ast.Constant object at 0x7da1b0789b70>, <ast.Constant object at 0x7da1b0788760>], [<ast.Name object at 0x7da1b0788100>, <ast.Name object at 0x7da1b07881c0>, <ast.Name object at 0x7da1b078a5c0>, <ast.Call object at 0x7da1b084f130>, <ast.Name object at 0x7da1b084fd90>]]
return[call[name[self].client.get, parameter[call[name[self]._url, parameter[]]]]] | keyword[def] identifier[all] ( identifier[self] , identifier[audience] = keyword[None] , identifier[page] = keyword[None] , identifier[per_page] = keyword[None] , identifier[include_totals] = keyword[False] , identifier[client_id] = keyword[None] ):
literal[string]
identifier[params] ={
literal[string] : identifier[audience] ,
literal[string] : identifier[page] ,
literal[string] : identifier[per_page] ,
literal[string] : identifier[str] ( identifier[include_totals] ). identifier[lower] (),
literal[string] : identifier[client_id] ,
}
keyword[return] identifier[self] . identifier[client] . identifier[get] ( identifier[self] . identifier[_url] (), identifier[params] = identifier[params] ) | def all(self, audience=None, page=None, per_page=None, include_totals=False, client_id=None):
"""Retrieves all client grants.
Args:
audience (str, optional): URL encoded audience of a Resource Server
to filter
page (int, optional): The result's page number (zero based).
per_page (int, optional): The amount of entries per page.
include_totals (bool, optional): True if the query summary is
to be included in the result, False otherwise.
client_id (string, optional): The id of a client to filter
See: https://auth0.com/docs/api/management/v2#!/Client_Grants/get_client_grants
"""
params = {'audience': audience, 'page': page, 'per_page': per_page, 'include_totals': str(include_totals).lower(), 'client_id': client_id}
return self.client.get(self._url(), params=params) |
def setPixmap(self, pixmap):
"""
Sets the pixmap associated with this node.
:param pixmap | <str> || <QtGui.QPixmap> || None
"""
if pixmap is not None:
self._style = XNode.NodeStyle.Pixmap
self._pixmap = QPixmap(pixmap)
else:
self._style = XNode.NodeStyle.Rectangle
self._pixmap = pixmap
self.update() | def function[setPixmap, parameter[self, pixmap]]:
constant[
Sets the pixmap associated with this node.
:param pixmap | <str> || <QtGui.QPixmap> || None
]
if compare[name[pixmap] is_not constant[None]] begin[:]
name[self]._style assign[=] name[XNode].NodeStyle.Pixmap
name[self]._pixmap assign[=] call[name[QPixmap], parameter[name[pixmap]]]
call[name[self].update, parameter[]] | keyword[def] identifier[setPixmap] ( identifier[self] , identifier[pixmap] ):
literal[string]
keyword[if] identifier[pixmap] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_style] = identifier[XNode] . identifier[NodeStyle] . identifier[Pixmap]
identifier[self] . identifier[_pixmap] = identifier[QPixmap] ( identifier[pixmap] )
keyword[else] :
identifier[self] . identifier[_style] = identifier[XNode] . identifier[NodeStyle] . identifier[Rectangle]
identifier[self] . identifier[_pixmap] = identifier[pixmap]
identifier[self] . identifier[update] () | def setPixmap(self, pixmap):
"""
Sets the pixmap associated with this node.
:param pixmap | <str> || <QtGui.QPixmap> || None
"""
if pixmap is not None:
self._style = XNode.NodeStyle.Pixmap
self._pixmap = QPixmap(pixmap) # depends on [control=['if'], data=['pixmap']]
else:
self._style = XNode.NodeStyle.Rectangle
self._pixmap = pixmap
self.update() |
def get(self, rate_type, role, session, fields=[], **kwargs):
'''taobao.traderates.get 搜索评价信息
搜索评价信息,只能获取距今180天内的评价记录'''
request = TOPRequest('taobao.traderates.get')
request['rate_type'] = rate_type
request['role'] = role
if not fields:
tradeRate = TradeRate()
fields = tradeRate.fields
request['fields'] = fields
for k, v in kwargs.iteritems():
if k not in ('result', 'page_no', 'page_size', 'start_date', 'end_date', 'tid') and v==None: continue
request[k] = v
self.create(self.execute(request, session))
return self.trade_rates | def function[get, parameter[self, rate_type, role, session, fields]]:
constant[taobao.traderates.get 搜索评价信息
搜索评价信息,只能获取距今180天内的评价记录]
variable[request] assign[=] call[name[TOPRequest], parameter[constant[taobao.traderates.get]]]
call[name[request]][constant[rate_type]] assign[=] name[rate_type]
call[name[request]][constant[role]] assign[=] name[role]
if <ast.UnaryOp object at 0x7da1b2583910> begin[:]
variable[tradeRate] assign[=] call[name[TradeRate], parameter[]]
variable[fields] assign[=] name[tradeRate].fields
call[name[request]][constant[fields]] assign[=] name[fields]
for taget[tuple[[<ast.Name object at 0x7da1b25839a0>, <ast.Name object at 0x7da1b25839d0>]]] in starred[call[name[kwargs].iteritems, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b2580310> begin[:]
continue
call[name[request]][name[k]] assign[=] name[v]
call[name[self].create, parameter[call[name[self].execute, parameter[name[request], name[session]]]]]
return[name[self].trade_rates] | keyword[def] identifier[get] ( identifier[self] , identifier[rate_type] , identifier[role] , identifier[session] , identifier[fields] =[],** identifier[kwargs] ):
literal[string]
identifier[request] = identifier[TOPRequest] ( literal[string] )
identifier[request] [ literal[string] ]= identifier[rate_type]
identifier[request] [ literal[string] ]= identifier[role]
keyword[if] keyword[not] identifier[fields] :
identifier[tradeRate] = identifier[TradeRate] ()
identifier[fields] = identifier[tradeRate] . identifier[fields]
identifier[request] [ literal[string] ]= identifier[fields]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[iteritems] ():
keyword[if] identifier[k] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ) keyword[and] identifier[v] == keyword[None] : keyword[continue]
identifier[request] [ identifier[k] ]= identifier[v]
identifier[self] . identifier[create] ( identifier[self] . identifier[execute] ( identifier[request] , identifier[session] ))
keyword[return] identifier[self] . identifier[trade_rates] | def get(self, rate_type, role, session, fields=[], **kwargs):
"""taobao.traderates.get 搜索评价信息
搜索评价信息,只能获取距今180天内的评价记录"""
request = TOPRequest('taobao.traderates.get')
request['rate_type'] = rate_type
request['role'] = role
if not fields:
tradeRate = TradeRate()
fields = tradeRate.fields # depends on [control=['if'], data=[]]
request['fields'] = fields
for (k, v) in kwargs.iteritems():
if k not in ('result', 'page_no', 'page_size', 'start_date', 'end_date', 'tid') and v == None:
continue # depends on [control=['if'], data=[]]
request[k] = v # depends on [control=['for'], data=[]]
self.create(self.execute(request, session))
return self.trade_rates |
def initialize(self):
"""Initialize croniter and related times"""
if self.croniter is None:
self.time = time.time()
self.datetime = datetime.now(self.tz)
self.loop_time = self.loop.time()
self.croniter = croniter(self.spec, start_time=self.datetime) | def function[initialize, parameter[self]]:
constant[Initialize croniter and related times]
if compare[name[self].croniter is constant[None]] begin[:]
name[self].time assign[=] call[name[time].time, parameter[]]
name[self].datetime assign[=] call[name[datetime].now, parameter[name[self].tz]]
name[self].loop_time assign[=] call[name[self].loop.time, parameter[]]
name[self].croniter assign[=] call[name[croniter], parameter[name[self].spec]] | keyword[def] identifier[initialize] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[croniter] keyword[is] keyword[None] :
identifier[self] . identifier[time] = identifier[time] . identifier[time] ()
identifier[self] . identifier[datetime] = identifier[datetime] . identifier[now] ( identifier[self] . identifier[tz] )
identifier[self] . identifier[loop_time] = identifier[self] . identifier[loop] . identifier[time] ()
identifier[self] . identifier[croniter] = identifier[croniter] ( identifier[self] . identifier[spec] , identifier[start_time] = identifier[self] . identifier[datetime] ) | def initialize(self):
"""Initialize croniter and related times"""
if self.croniter is None:
self.time = time.time()
self.datetime = datetime.now(self.tz)
self.loop_time = self.loop.time()
self.croniter = croniter(self.spec, start_time=self.datetime) # depends on [control=['if'], data=[]] |
def _assert_no_error(error, exception_class=None):
"""
Checks the return code and throws an exception if there is an error to
report
"""
if error == 0:
return
cf_error_string = Security.SecCopyErrorMessageString(error, None)
output = _cf_string_to_unicode(cf_error_string)
CoreFoundation.CFRelease(cf_error_string)
if output is None or output == u'':
output = u'OSStatus %s' % error
if exception_class is None:
exception_class = ssl.SSLError
raise exception_class(output) | def function[_assert_no_error, parameter[error, exception_class]]:
constant[
Checks the return code and throws an exception if there is an error to
report
]
if compare[name[error] equal[==] constant[0]] begin[:]
return[None]
variable[cf_error_string] assign[=] call[name[Security].SecCopyErrorMessageString, parameter[name[error], constant[None]]]
variable[output] assign[=] call[name[_cf_string_to_unicode], parameter[name[cf_error_string]]]
call[name[CoreFoundation].CFRelease, parameter[name[cf_error_string]]]
if <ast.BoolOp object at 0x7da18bcc8fa0> begin[:]
variable[output] assign[=] binary_operation[constant[OSStatus %s] <ast.Mod object at 0x7da2590d6920> name[error]]
if compare[name[exception_class] is constant[None]] begin[:]
variable[exception_class] assign[=] name[ssl].SSLError
<ast.Raise object at 0x7da18bccaec0> | keyword[def] identifier[_assert_no_error] ( identifier[error] , identifier[exception_class] = keyword[None] ):
literal[string]
keyword[if] identifier[error] == literal[int] :
keyword[return]
identifier[cf_error_string] = identifier[Security] . identifier[SecCopyErrorMessageString] ( identifier[error] , keyword[None] )
identifier[output] = identifier[_cf_string_to_unicode] ( identifier[cf_error_string] )
identifier[CoreFoundation] . identifier[CFRelease] ( identifier[cf_error_string] )
keyword[if] identifier[output] keyword[is] keyword[None] keyword[or] identifier[output] == literal[string] :
identifier[output] = literal[string] % identifier[error]
keyword[if] identifier[exception_class] keyword[is] keyword[None] :
identifier[exception_class] = identifier[ssl] . identifier[SSLError]
keyword[raise] identifier[exception_class] ( identifier[output] ) | def _assert_no_error(error, exception_class=None):
"""
Checks the return code and throws an exception if there is an error to
report
"""
if error == 0:
return # depends on [control=['if'], data=[]]
cf_error_string = Security.SecCopyErrorMessageString(error, None)
output = _cf_string_to_unicode(cf_error_string)
CoreFoundation.CFRelease(cf_error_string)
if output is None or output == u'':
output = u'OSStatus %s' % error # depends on [control=['if'], data=[]]
if exception_class is None:
exception_class = ssl.SSLError # depends on [control=['if'], data=['exception_class']]
raise exception_class(output) |
def parse(source, handler):
'''
Convert XML 1.0 to MicroXML
source - XML 1.0 input
handler - MicroXML events handler
Returns uxml, extras
uxml - MicroXML element extracted from the source
extras - information to be preserved but not part of MicroXML, e.g. namespaces
'''
h = expat_callbacks(handler)
p = xml.parsers.expat.ParserCreate(namespace_separator=' ')
p.StartElementHandler = h.start_element
p.EndElementHandler = h.end_element
p.CharacterDataHandler = h.char_data
p.StartNamespaceDeclHandler = h.start_namespace
p.EndNamespaceDeclHandler = h.end_namespace
p.Parse(source)
return p | def function[parse, parameter[source, handler]]:
constant[
Convert XML 1.0 to MicroXML
source - XML 1.0 input
handler - MicroXML events handler
Returns uxml, extras
uxml - MicroXML element extracted from the source
extras - information to be preserved but not part of MicroXML, e.g. namespaces
]
variable[h] assign[=] call[name[expat_callbacks], parameter[name[handler]]]
variable[p] assign[=] call[name[xml].parsers.expat.ParserCreate, parameter[]]
name[p].StartElementHandler assign[=] name[h].start_element
name[p].EndElementHandler assign[=] name[h].end_element
name[p].CharacterDataHandler assign[=] name[h].char_data
name[p].StartNamespaceDeclHandler assign[=] name[h].start_namespace
name[p].EndNamespaceDeclHandler assign[=] name[h].end_namespace
call[name[p].Parse, parameter[name[source]]]
return[name[p]] | keyword[def] identifier[parse] ( identifier[source] , identifier[handler] ):
literal[string]
identifier[h] = identifier[expat_callbacks] ( identifier[handler] )
identifier[p] = identifier[xml] . identifier[parsers] . identifier[expat] . identifier[ParserCreate] ( identifier[namespace_separator] = literal[string] )
identifier[p] . identifier[StartElementHandler] = identifier[h] . identifier[start_element]
identifier[p] . identifier[EndElementHandler] = identifier[h] . identifier[end_element]
identifier[p] . identifier[CharacterDataHandler] = identifier[h] . identifier[char_data]
identifier[p] . identifier[StartNamespaceDeclHandler] = identifier[h] . identifier[start_namespace]
identifier[p] . identifier[EndNamespaceDeclHandler] = identifier[h] . identifier[end_namespace]
identifier[p] . identifier[Parse] ( identifier[source] )
keyword[return] identifier[p] | def parse(source, handler):
"""
Convert XML 1.0 to MicroXML
source - XML 1.0 input
handler - MicroXML events handler
Returns uxml, extras
uxml - MicroXML element extracted from the source
extras - information to be preserved but not part of MicroXML, e.g. namespaces
"""
h = expat_callbacks(handler)
p = xml.parsers.expat.ParserCreate(namespace_separator=' ')
p.StartElementHandler = h.start_element
p.EndElementHandler = h.end_element
p.CharacterDataHandler = h.char_data
p.StartNamespaceDeclHandler = h.start_namespace
p.EndNamespaceDeclHandler = h.end_namespace
p.Parse(source)
return p |
def _independent_lattice(self, shape, lattice=None):
""" Helper to construct the list of nodes and edges. """
I, J = shape
if lattice is not None:
end_I = min(I, max(lattice[..., 3])) - 1
end_J = min(J, max(lattice[..., 4])) - 1
unvisited_nodes = deque([(i, j, s)
for i in range(end_I)
for j in range(end_J)
for s in self._start_states])
lattice = lattice.tolist()
else:
lattice = []
unvisited_nodes = deque([(0, 0, s) for s in self._start_states])
lattice += _grow_independent_lattice(self._transitions,
self.n_states, (I, J),
unvisited_nodes)
lattice = np.array(sorted(lattice), dtype='int64')
return lattice | def function[_independent_lattice, parameter[self, shape, lattice]]:
constant[ Helper to construct the list of nodes and edges. ]
<ast.Tuple object at 0x7da1b2525e70> assign[=] name[shape]
if compare[name[lattice] is_not constant[None]] begin[:]
variable[end_I] assign[=] binary_operation[call[name[min], parameter[name[I], call[name[max], parameter[call[name[lattice]][tuple[[<ast.Constant object at 0x7da1b2525c60>, <ast.Constant object at 0x7da1b2525b40>]]]]]]] - constant[1]]
variable[end_J] assign[=] binary_operation[call[name[min], parameter[name[J], call[name[max], parameter[call[name[lattice]][tuple[[<ast.Constant object at 0x7da1b2525900>, <ast.Constant object at 0x7da1b2524040>]]]]]]] - constant[1]]
variable[unvisited_nodes] assign[=] call[name[deque], parameter[<ast.ListComp object at 0x7da1b25247c0>]]
variable[lattice] assign[=] call[name[lattice].tolist, parameter[]]
<ast.AugAssign object at 0x7da1b2527f70>
variable[lattice] assign[=] call[name[np].array, parameter[call[name[sorted], parameter[name[lattice]]]]]
return[name[lattice]] | keyword[def] identifier[_independent_lattice] ( identifier[self] , identifier[shape] , identifier[lattice] = keyword[None] ):
literal[string]
identifier[I] , identifier[J] = identifier[shape]
keyword[if] identifier[lattice] keyword[is] keyword[not] keyword[None] :
identifier[end_I] = identifier[min] ( identifier[I] , identifier[max] ( identifier[lattice] [..., literal[int] ]))- literal[int]
identifier[end_J] = identifier[min] ( identifier[J] , identifier[max] ( identifier[lattice] [..., literal[int] ]))- literal[int]
identifier[unvisited_nodes] = identifier[deque] ([( identifier[i] , identifier[j] , identifier[s] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[end_I] )
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[end_J] )
keyword[for] identifier[s] keyword[in] identifier[self] . identifier[_start_states] ])
identifier[lattice] = identifier[lattice] . identifier[tolist] ()
keyword[else] :
identifier[lattice] =[]
identifier[unvisited_nodes] = identifier[deque] ([( literal[int] , literal[int] , identifier[s] ) keyword[for] identifier[s] keyword[in] identifier[self] . identifier[_start_states] ])
identifier[lattice] += identifier[_grow_independent_lattice] ( identifier[self] . identifier[_transitions] ,
identifier[self] . identifier[n_states] ,( identifier[I] , identifier[J] ),
identifier[unvisited_nodes] )
identifier[lattice] = identifier[np] . identifier[array] ( identifier[sorted] ( identifier[lattice] ), identifier[dtype] = literal[string] )
keyword[return] identifier[lattice] | def _independent_lattice(self, shape, lattice=None):
""" Helper to construct the list of nodes and edges. """
(I, J) = shape
if lattice is not None:
end_I = min(I, max(lattice[..., 3])) - 1
end_J = min(J, max(lattice[..., 4])) - 1
unvisited_nodes = deque([(i, j, s) for i in range(end_I) for j in range(end_J) for s in self._start_states])
lattice = lattice.tolist() # depends on [control=['if'], data=['lattice']]
else:
lattice = []
unvisited_nodes = deque([(0, 0, s) for s in self._start_states])
lattice += _grow_independent_lattice(self._transitions, self.n_states, (I, J), unvisited_nodes)
lattice = np.array(sorted(lattice), dtype='int64')
return lattice |
def get_feature_sequence(self, feature_id, organism=None, sequence=None):
"""
[CURRENTLY BROKEN] Get the sequence of a feature
:type feature_id: str
:param feature_id: Feature UUID
:type organism: str
:param organism: Organism Common Name
:type sequence: str
:param sequence: Sequence Name
:rtype: dict
:return: A standard apollo feature dictionary ({"features": [{...}]})
"""
# Choices: peptide, cds, cdna, genomic
# { "track": "Miro.v2", "features": [ { "uniquename": "714dcda6-2358-467d-855e-f495a82aa154" } ], "operation": "get_sequence", "type": "peptide" }:
# { "track": "Miro.v2", "features": [ { "uniquename": "714dcda6-2358-467d-855e-f495a82aa154" } ], "operation": "get_sequence", "flank": 500, "type": "genomic" }:
# This API is not behaving as expected. Wrong documentation?
data = {
'type': 'peptide',
'features': [
{'uniquename': feature_id}
]
}
data = self._update_data(data, organism, sequence)
return self.post('getSequence', data) | def function[get_feature_sequence, parameter[self, feature_id, organism, sequence]]:
constant[
[CURRENTLY BROKEN] Get the sequence of a feature
:type feature_id: str
:param feature_id: Feature UUID
:type organism: str
:param organism: Organism Common Name
:type sequence: str
:param sequence: Sequence Name
:rtype: dict
:return: A standard apollo feature dictionary ({"features": [{...}]})
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da20e962860>, <ast.Constant object at 0x7da20e963370>], [<ast.Constant object at 0x7da20e9631c0>, <ast.List object at 0x7da20e961c30>]]
variable[data] assign[=] call[name[self]._update_data, parameter[name[data], name[organism], name[sequence]]]
return[call[name[self].post, parameter[constant[getSequence], name[data]]]] | keyword[def] identifier[get_feature_sequence] ( identifier[self] , identifier[feature_id] , identifier[organism] = keyword[None] , identifier[sequence] = keyword[None] ):
literal[string]
identifier[data] ={
literal[string] : literal[string] ,
literal[string] :[
{ literal[string] : identifier[feature_id] }
]
}
identifier[data] = identifier[self] . identifier[_update_data] ( identifier[data] , identifier[organism] , identifier[sequence] )
keyword[return] identifier[self] . identifier[post] ( literal[string] , identifier[data] ) | def get_feature_sequence(self, feature_id, organism=None, sequence=None):
"""
[CURRENTLY BROKEN] Get the sequence of a feature
:type feature_id: str
:param feature_id: Feature UUID
:type organism: str
:param organism: Organism Common Name
:type sequence: str
:param sequence: Sequence Name
:rtype: dict
:return: A standard apollo feature dictionary ({"features": [{...}]})
"""
# Choices: peptide, cds, cdna, genomic
# { "track": "Miro.v2", "features": [ { "uniquename": "714dcda6-2358-467d-855e-f495a82aa154" } ], "operation": "get_sequence", "type": "peptide" }:
# { "track": "Miro.v2", "features": [ { "uniquename": "714dcda6-2358-467d-855e-f495a82aa154" } ], "operation": "get_sequence", "flank": 500, "type": "genomic" }:
# This API is not behaving as expected. Wrong documentation?
data = {'type': 'peptide', 'features': [{'uniquename': feature_id}]}
data = self._update_data(data, organism, sequence)
return self.post('getSequence', data) |
def getSubjectInfo(self, subject, vendorSpecific=None):
"""See Also: getSubjectInfoResponse()
Args:
subject:
vendorSpecific:
Returns:
"""
response = self.getSubjectInfoResponse(subject, vendorSpecific)
return self._read_dataone_type_response(response, 'SubjectInfo') | def function[getSubjectInfo, parameter[self, subject, vendorSpecific]]:
constant[See Also: getSubjectInfoResponse()
Args:
subject:
vendorSpecific:
Returns:
]
variable[response] assign[=] call[name[self].getSubjectInfoResponse, parameter[name[subject], name[vendorSpecific]]]
return[call[name[self]._read_dataone_type_response, parameter[name[response], constant[SubjectInfo]]]] | keyword[def] identifier[getSubjectInfo] ( identifier[self] , identifier[subject] , identifier[vendorSpecific] = keyword[None] ):
literal[string]
identifier[response] = identifier[self] . identifier[getSubjectInfoResponse] ( identifier[subject] , identifier[vendorSpecific] )
keyword[return] identifier[self] . identifier[_read_dataone_type_response] ( identifier[response] , literal[string] ) | def getSubjectInfo(self, subject, vendorSpecific=None):
"""See Also: getSubjectInfoResponse()
Args:
subject:
vendorSpecific:
Returns:
"""
response = self.getSubjectInfoResponse(subject, vendorSpecific)
return self._read_dataone_type_response(response, 'SubjectInfo') |
def init_engine(self, get_loader):
    """Construct and store this algorithm's PipelineEngine.

    When *get_loader* is ``None`` an ``ExplodingPipelineEngine`` is
    installed (it fails loudly if a pipeline is ever run); otherwise a
    ``SimplePipelineEngine`` is built around the loader, the asset
    finder, and the default pipeline domain for the trading calendar.
    """
    if get_loader is None:
        self.engine = ExplodingPipelineEngine()
        return
    self.engine = SimplePipelineEngine(
        get_loader,
        self.asset_finder,
        self.default_pipeline_domain(self.trading_calendar),
    )
constant[
Construct and store a PipelineEngine from loader.
If get_loader is None, constructs an ExplodingPipelineEngine
]
if compare[name[get_loader] is_not constant[None]] begin[:]
name[self].engine assign[=] call[name[SimplePipelineEngine], parameter[name[get_loader], name[self].asset_finder, call[name[self].default_pipeline_domain, parameter[name[self].trading_calendar]]]] | keyword[def] identifier[init_engine] ( identifier[self] , identifier[get_loader] ):
literal[string]
keyword[if] identifier[get_loader] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[engine] = identifier[SimplePipelineEngine] (
identifier[get_loader] ,
identifier[self] . identifier[asset_finder] ,
identifier[self] . identifier[default_pipeline_domain] ( identifier[self] . identifier[trading_calendar] ),
)
keyword[else] :
identifier[self] . identifier[engine] = identifier[ExplodingPipelineEngine] () | def init_engine(self, get_loader):
"""
Construct and store a PipelineEngine from loader.
If get_loader is None, constructs an ExplodingPipelineEngine
"""
if get_loader is not None:
self.engine = SimplePipelineEngine(get_loader, self.asset_finder, self.default_pipeline_domain(self.trading_calendar)) # depends on [control=['if'], data=['get_loader']]
else:
self.engine = ExplodingPipelineEngine() |
def ystep(self):
    r"""Minimise Augmented Lagrangian with respect to
    :math:`\mathbf{y}` via the joint :math:`\ell_1`/:math:`\ell_{1,2}`
    proximal operator, then defer to the parent class ystep."""
    shrink_input = self.AX + self.U
    l1_weight = (self.lmbda / self.rho) * self.wl1
    l21_weight = self.mu / self.rho
    self.Y = np.asarray(
        sp.prox_l1l2(shrink_input, l1_weight, l21_weight, axis=-1),
        dtype=self.dtype)
    GenericBPDN.ystep(self)
constant[Minimise Augmented Lagrangian with respect to
:math:`\mathbf{y}`.]
name[self].Y assign[=] call[name[np].asarray, parameter[call[name[sp].prox_l1l2, parameter[binary_operation[name[self].AX + name[self].U], binary_operation[binary_operation[name[self].lmbda / name[self].rho] * name[self].wl1], binary_operation[name[self].mu / name[self].rho]]]]]
call[name[GenericBPDN].ystep, parameter[name[self]]] | keyword[def] identifier[ystep] ( identifier[self] ):
literal[string]
identifier[self] . identifier[Y] = identifier[np] . identifier[asarray] ( identifier[sp] . identifier[prox_l1l2] (
identifier[self] . identifier[AX] + identifier[self] . identifier[U] ,( identifier[self] . identifier[lmbda] / identifier[self] . identifier[rho] )* identifier[self] . identifier[wl1] ,
identifier[self] . identifier[mu] / identifier[self] . identifier[rho] , identifier[axis] =- literal[int] ), identifier[dtype] = identifier[self] . identifier[dtype] )
identifier[GenericBPDN] . identifier[ystep] ( identifier[self] ) | def ystep(self):
"""Minimise Augmented Lagrangian with respect to
:math:`\\mathbf{y}`."""
self.Y = np.asarray(sp.prox_l1l2(self.AX + self.U, self.lmbda / self.rho * self.wl1, self.mu / self.rho, axis=-1), dtype=self.dtype)
GenericBPDN.ystep(self) |
def remove_namespace(self, namespace):
    """Delete all rows and cached state belonging to *namespace*."""
    statements = (
        'DELETE FROM gauged_data WHERE namespace = %s',
        'DELETE FROM gauged_statistics WHERE namespace = %s',
        'DELETE FROM gauged_keys WHERE namespace = %s',
    )
    for statement in statements:
        self.cursor.execute(statement, (namespace,))
    self.remove_cache(namespace)
constant[Remove all data associated with the current namespace]
variable[params] assign[=] tuple[[<ast.Name object at 0x7da1b24b3850>]]
variable[execute] assign[=] name[self].cursor.execute
call[name[execute], parameter[constant[DELETE FROM gauged_data WHERE namespace = %s], name[params]]]
call[name[execute], parameter[constant[DELETE FROM gauged_statistics WHERE namespace = %s], name[params]]]
call[name[execute], parameter[constant[DELETE FROM gauged_keys WHERE namespace = %s], name[params]]]
call[name[self].remove_cache, parameter[name[namespace]]] | keyword[def] identifier[remove_namespace] ( identifier[self] , identifier[namespace] ):
literal[string]
identifier[params] =( identifier[namespace] ,)
identifier[execute] = identifier[self] . identifier[cursor] . identifier[execute]
identifier[execute] ( literal[string] , identifier[params] )
identifier[execute] ( literal[string] , identifier[params] )
identifier[execute] ( literal[string] , identifier[params] )
identifier[self] . identifier[remove_cache] ( identifier[namespace] ) | def remove_namespace(self, namespace):
"""Remove all data associated with the current namespace"""
params = (namespace,)
execute = self.cursor.execute
execute('DELETE FROM gauged_data WHERE namespace = %s', params)
execute('DELETE FROM gauged_statistics WHERE namespace = %s', params)
execute('DELETE FROM gauged_keys WHERE namespace = %s', params)
self.remove_cache(namespace) |
def __add_symbols(self, cmd):
    """Append preprocessor define/undefine flags from the config to *cmd*.

    Each configured symbol becomes a ``-D"sym"`` (defined) or ``-U"sym"``
    (undefined) fragment; all fragments of a kind are joined into a
    single appended string, exactly one list entry per kind.
    """
    defined = self.__config.define_symbols
    if defined:
        cmd.append(''.join(' -D"%s"' % symbol for symbol in defined))
    undefined = self.__config.undefine_symbols
    if undefined:
        cmd.append(''.join(' -U"%s"' % symbol for symbol in undefined))
    return cmd
constant[
Add all additional defined and undefined symbols.
]
if name[self].__config.define_symbols begin[:]
variable[symbols] assign[=] name[self].__config.define_symbols
call[name[cmd].append, parameter[call[constant[].join, parameter[<ast.ListComp object at 0x7da1b26ae7a0>]]]]
if name[self].__config.undefine_symbols begin[:]
variable[un_symbols] assign[=] name[self].__config.undefine_symbols
call[name[cmd].append, parameter[call[constant[].join, parameter[<ast.ListComp object at 0x7da1b1306290>]]]]
return[name[cmd]] | keyword[def] identifier[__add_symbols] ( identifier[self] , identifier[cmd] ):
literal[string]
keyword[if] identifier[self] . identifier[__config] . identifier[define_symbols] :
identifier[symbols] = identifier[self] . identifier[__config] . identifier[define_symbols]
identifier[cmd] . identifier[append] ( literal[string] . identifier[join] (
[ literal[string] % identifier[def_symbol] keyword[for] identifier[def_symbol] keyword[in] identifier[symbols] ]))
keyword[if] identifier[self] . identifier[__config] . identifier[undefine_symbols] :
identifier[un_symbols] = identifier[self] . identifier[__config] . identifier[undefine_symbols]
identifier[cmd] . identifier[append] ( literal[string] . identifier[join] (
[ literal[string] % identifier[undef_symbol] keyword[for] identifier[undef_symbol] keyword[in] identifier[un_symbols] ]))
keyword[return] identifier[cmd] | def __add_symbols(self, cmd):
"""
Add all additional defined and undefined symbols.
"""
if self.__config.define_symbols:
symbols = self.__config.define_symbols
cmd.append(''.join([' -D"%s"' % def_symbol for def_symbol in symbols])) # depends on [control=['if'], data=[]]
if self.__config.undefine_symbols:
un_symbols = self.__config.undefine_symbols
cmd.append(''.join([' -U"%s"' % undef_symbol for undef_symbol in un_symbols])) # depends on [control=['if'], data=[]]
return cmd |
def p_defaultFlavor(p):
    """defaultFlavor : ',' FLAVOR '(' flavorListWithComma ')'"""
    # NOTE: the docstring above is the PLY grammar rule and must not change.
    # Start from the DSP0004 defaults so every known flavor keyword is
    # present in the dictionary, then enable each explicitly listed flavor.
    flavors = dict.fromkeys(('ENABLEOVERRIDE', 'TOSUBCLASS'), True)
    flavors.update(dict.fromkeys(
        ('TOINSTANCE', 'DISABLEOVERRIDE', 'RESTRICTED', 'TRANSLATABLE'),
        False))
    for flavor_keyword in p[4]:
        flavors[flavor_keyword] = True
    p[0] = flavors
constant[defaultFlavor : ',' FLAVOR '(' flavorListWithComma ')']
variable[flist] assign[=] call[name[p]][constant[4]]
variable[flavors] assign[=] dictionary[[<ast.Constant object at 0x7da1b0c44b80>, <ast.Constant object at 0x7da1b0c442b0>, <ast.Constant object at 0x7da1b0c45cf0>, <ast.Constant object at 0x7da1b0c44730>, <ast.Constant object at 0x7da1b0c47940>, <ast.Constant object at 0x7da1b0c47490>], [<ast.Constant object at 0x7da1b0c45e10>, <ast.Constant object at 0x7da1b0c47f70>, <ast.Constant object at 0x7da1b0c447c0>, <ast.Constant object at 0x7da1b0c458d0>, <ast.Constant object at 0x7da1b0c45210>, <ast.Constant object at 0x7da1b0c46e90>]]
for taget[name[i]] in starred[name[flist]] begin[:]
call[name[flavors]][name[i]] assign[=] constant[True]
call[name[p]][constant[0]] assign[=] name[flavors] | keyword[def] identifier[p_defaultFlavor] ( identifier[p] ):
literal[string]
identifier[flist] = identifier[p] [ literal[int] ]
identifier[flavors] ={ literal[string] : keyword[True] ,
literal[string] : keyword[True] ,
literal[string] : keyword[False] ,
literal[string] : keyword[False] ,
literal[string] : keyword[False] ,
literal[string] : keyword[False] }
keyword[for] identifier[i] keyword[in] identifier[flist] :
identifier[flavors] [ identifier[i] ]= keyword[True]
identifier[p] [ literal[int] ]= identifier[flavors] | def p_defaultFlavor(p):
"""defaultFlavor : ',' FLAVOR '(' flavorListWithComma ')'"""
flist = p[4]
# Create dictionary of default flavors based on DSP0004 definition
# of defaults for flavors. This insures that all possible flavors keywords
# are defined in the created dictionary.
flavors = {'ENABLEOVERRIDE': True, 'TOSUBCLASS': True, 'TOINSTANCE': False, 'DISABLEOVERRIDE': False, 'RESTRICTED': False, 'TRANSLATABLE': False}
for i in flist:
flavors[i] = True # depends on [control=['for'], data=['i']]
p[0] = flavors |
def sort_header(header_text):
    """Return *header_text* with its @SQ lines sorted by sequence name.

    Non-@SQ lines keep their original order; the full block of sorted
    @SQ lines is emitted at the position of the first @SQ line seen.
    Sorting is plain lexicographic on the SN value.
    """
    lines = header_text.rstrip().split("\n")
    seq_lengths = {}
    for line in lines:
        match = re.match('@SQ\tSN:(\S+)\tLN:(\S+)', line)
        if match:
            seq_lengths[match.group(1)] = match.group(2)
    pieces = []
    sq_block_emitted = False
    for line in lines:
        if re.match('@SQ\tSN:', line):
            if sq_block_emitted:
                continue
            sq_block_emitted = True
            for seq_name in sorted(seq_lengths):
                pieces.append(
                    "@SQ\tSN:" + seq_name + "\tLN:" + str(seq_lengths[seq_name]) + "\n")
        else:
            pieces.append(line.rstrip("\n") + "\n")
    return ''.join(pieces)
constant[sort the chromosomes in a header text]
variable[lines] assign[=] call[call[name[header_text].rstrip, parameter[]].split, parameter[constant[
]]]
variable[rlens] assign[=] dictionary[[], []]
for taget[name[ln]] in starred[name[lines]] begin[:]
variable[m] assign[=] call[name[re].match, parameter[constant[@SQ SN:(\S+) LN:(\S+)], name[ln]]]
if name[m] begin[:]
call[name[rlens]][call[name[m].group, parameter[constant[1]]]] assign[=] call[name[m].group, parameter[constant[2]]]
variable[output] assign[=] constant[]
variable[done_lens] assign[=] constant[False]
for taget[name[ln]] in starred[name[lines]] begin[:]
if call[name[re].match, parameter[constant[@SQ SN:], name[ln]]] begin[:]
if <ast.UnaryOp object at 0x7da20c992140> begin[:]
variable[done_lens] assign[=] constant[True]
for taget[name[chr]] in starred[call[name[sorted], parameter[call[name[rlens].keys, parameter[]]]]] begin[:]
<ast.AugAssign object at 0x7da20c991180>
return[name[output]] | keyword[def] identifier[sort_header] ( identifier[header_text] ):
literal[string]
identifier[lines] = identifier[header_text] . identifier[rstrip] (). identifier[split] ( literal[string] )
identifier[rlens] ={}
keyword[for] identifier[ln] keyword[in] identifier[lines] :
identifier[m] = identifier[re] . identifier[match] ( literal[string] , identifier[ln] )
keyword[if] identifier[m] :
identifier[rlens] [ identifier[m] . identifier[group] ( literal[int] )]= identifier[m] . identifier[group] ( literal[int] )
identifier[output] = literal[string]
identifier[done_lens] = keyword[False]
keyword[for] identifier[ln] keyword[in] identifier[lines] :
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[ln] ):
keyword[if] keyword[not] identifier[done_lens] :
identifier[done_lens] = keyword[True]
keyword[for] identifier[chr] keyword[in] identifier[sorted] ( identifier[rlens] . identifier[keys] ()):
identifier[output] += literal[string] + identifier[chr] + literal[string] + identifier[str] ( identifier[rlens] [ identifier[chr] ])+ literal[string]
keyword[else] :
identifier[output] += identifier[ln] . identifier[rstrip] ( literal[string] )+ literal[string]
keyword[return] identifier[output] | def sort_header(header_text):
"""sort the chromosomes in a header text"""
lines = header_text.rstrip().split('\n')
rlens = {}
for ln in lines:
m = re.match('@SQ\tSN:(\\S+)\tLN:(\\S+)', ln)
if m:
rlens[m.group(1)] = m.group(2) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ln']]
output = ''
done_lens = False
for ln in lines:
if re.match('@SQ\tSN:', ln):
if not done_lens:
done_lens = True
for chr in sorted(rlens.keys()):
output += '@SQ\tSN:' + chr + '\tLN:' + str(rlens[chr]) + '\n' # depends on [control=['for'], data=['chr']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
output += ln.rstrip('\n') + '\n' # depends on [control=['for'], data=['ln']]
return output |
def proxy_model(self):
    """Return the catalog model this proxied `Part` model is based on.

    Only parts of category `MODEL` can have a proxy; asking a proxy
    model for its source returns the `Part` (category `MODEL`) it was
    proxied from.

    :return: :class:`Part` with category `MODEL` from which this part is proxied
    :raises IllegalArgumentError: When this part is not a model
    :raises NotFoundError: When this model is not a proxy

    Example
    -------
    >>> proxy_part = project.model('Proxy based on catalog model')
    >>> catalog_model_of_proxy_part = proxy_part.proxy_model()
    """
    if self.category != Category.MODEL:
        raise IllegalArgumentError("Part {} is not a model, therefore it cannot have a proxy model".format(self))
    proxy_info = self._json_data.get('proxy')
    if not proxy_info:
        raise NotFoundError("Part {} is not a proxy".format(self.name))
    return self._client.model(pk=proxy_info.get('id'))
constant[
Retrieve the proxy model of this proxied `Part` as a `Part`.
Allows you to retrieve the model of a proxy. But trying to get the catalog model of a part that
has no proxy, will raise an :exc:`NotFoundError`. Only models can have a proxy.
:return: :class:`Part` with category `MODEL` and from which the current part is proxied
:raises NotFoundError: When no proxy model is found
Example
-------
>>> proxy_part = project.model('Proxy based on catalog model')
>>> catalog_model_of_proxy_part = proxy_part.proxy_model()
>>> proxied_material_of_the_bolt_model = project.model('Bolt Material')
>>> proxy_basis_for_the_material_model = proxied_material_of_the_bolt_model.proxy_model()
]
if compare[name[self].category not_equal[!=] name[Category].MODEL] begin[:]
<ast.Raise object at 0x7da20c6e74c0>
if <ast.BoolOp object at 0x7da20c6e7bb0> begin[:]
variable[catalog_model_id] assign[=] call[call[name[self]._json_data][constant[proxy]].get, parameter[constant[id]]]
return[call[name[self]._client.model, parameter[]]] | keyword[def] identifier[proxy_model] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[category] != identifier[Category] . identifier[MODEL] :
keyword[raise] identifier[IllegalArgumentError] ( literal[string] . identifier[format] ( identifier[self] ))
keyword[if] literal[string] keyword[in] identifier[self] . identifier[_json_data] keyword[and] identifier[self] . identifier[_json_data] . identifier[get] ( literal[string] ):
identifier[catalog_model_id] = identifier[self] . identifier[_json_data] [ literal[string] ]. identifier[get] ( literal[string] )
keyword[return] identifier[self] . identifier[_client] . identifier[model] ( identifier[pk] = identifier[catalog_model_id] )
keyword[else] :
keyword[raise] identifier[NotFoundError] ( literal[string] . identifier[format] ( identifier[self] . identifier[name] )) | def proxy_model(self):
"""
Retrieve the proxy model of this proxied `Part` as a `Part`.
Allows you to retrieve the model of a proxy. But trying to get the catalog model of a part that
has no proxy, will raise an :exc:`NotFoundError`. Only models can have a proxy.
:return: :class:`Part` with category `MODEL` and from which the current part is proxied
:raises NotFoundError: When no proxy model is found
Example
-------
>>> proxy_part = project.model('Proxy based on catalog model')
>>> catalog_model_of_proxy_part = proxy_part.proxy_model()
>>> proxied_material_of_the_bolt_model = project.model('Bolt Material')
>>> proxy_basis_for_the_material_model = proxied_material_of_the_bolt_model.proxy_model()
"""
if self.category != Category.MODEL:
raise IllegalArgumentError('Part {} is not a model, therefore it cannot have a proxy model'.format(self)) # depends on [control=['if'], data=[]]
if 'proxy' in self._json_data and self._json_data.get('proxy'):
catalog_model_id = self._json_data['proxy'].get('id')
return self._client.model(pk=catalog_model_id) # depends on [control=['if'], data=[]]
else:
raise NotFoundError('Part {} is not a proxy'.format(self.name)) |
def header_to_id(header):
    """Validate *header* and normalize it to a 16-character tracer ID.

    Accepted input is an unsigned base-16 hex string of variable length:
    shorter values are left-padded with zeros, longer values keep only
    their last 16 characters. Non-strings and non-hex strings yield
    ``BAD_ID``.

    :param header: the header to analyze, validate and convert (if needed)
    :return: a valid ID to be used internal to the tracer
    """
    if not isinstance(header, string_types):
        return BAD_ID
    try:
        # Reject anything that is not a hexadecimal value.
        int(header, 16)
    except ValueError:
        return BAD_ID
    if len(header) < 16:
        return header.zfill(16)
    if len(header) > 16:
        # Phase 0: keep only the trailing 16 characters.
        return header[-16:]
    return header
constant[
We can receive headers in the following formats:
1. unsigned base 16 hex string of variable length
2. [eventual]
:param header: the header to analyze, validate and convert (if needed)
:return: a valid ID to be used internal to the tracer
]
if <ast.UnaryOp object at 0x7da1b0329c90> begin[:]
return[name[BAD_ID]]
<ast.Try object at 0x7da1b032a050> | keyword[def] identifier[header_to_id] ( identifier[header] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[header] , identifier[string_types] ):
keyword[return] identifier[BAD_ID]
keyword[try] :
identifier[int] ( identifier[header] , literal[int] )
identifier[length] = identifier[len] ( identifier[header] )
keyword[if] identifier[length] < literal[int] :
identifier[header] = identifier[header] . identifier[zfill] ( literal[int] )
keyword[elif] identifier[length] > literal[int] :
identifier[header] = identifier[header] [- literal[int] :]
keyword[return] identifier[header]
keyword[except] identifier[ValueError] :
keyword[return] identifier[BAD_ID] | def header_to_id(header):
"""
We can receive headers in the following formats:
1. unsigned base 16 hex string of variable length
2. [eventual]
:param header: the header to analyze, validate and convert (if needed)
:return: a valid ID to be used internal to the tracer
"""
if not isinstance(header, string_types):
return BAD_ID # depends on [control=['if'], data=[]]
try:
# Test that header is truly a hexadecimal value before we try to convert
int(header, 16)
length = len(header)
if length < 16:
# Left pad ID with zeros
header = header.zfill(16) # depends on [control=['if'], data=[]]
elif length > 16:
# Phase 0: Discard everything but the last 16byte
header = header[-16:] # depends on [control=['if'], data=[]]
return header # depends on [control=['try'], data=[]]
except ValueError:
return BAD_ID # depends on [control=['except'], data=[]] |
def __diff_internal(self):
    """Differentiate the B-spline once, keeping the Bspline structure.

    Returns a tuple of ``(coefficient, Bspline)`` pairs such that the
    first derivative at ``x`` is ``sum(c * B(x) for c, B in pairs)``.
    Because the result stays in terms of Bspline objects, higher-order
    differentiation can be implemented recursively (see `diff`).

    See:
        `diff`: differentiation of any order >= 0
    """
    assert self.p > 0, "order of Bspline must be > 0"  # diff() handles p == 0
    # https://www.cs.mtu.edu/~shene/COURSES/cs3621/NOTES/spline/B-spline/bspline-derv.html
    knots = self.knot_vector
    order = self.p
    # The two degree-(p-1) splines the derivative is expressed in.
    lower_spline = Bspline(knots[:-1], order - 1)
    upper_spline = Bspline(knots[1:], order - 1)
    span_lower = knots[order:-1] - knots[:-(order + 1)]
    span_upper = knots[(order + 1):] - knots[1:-order]
    # Zero-width knot spans would divide by zero; map those terms to 0.
    with np.errstate(divide='ignore', invalid='ignore'):
        coeff_lower = np.where(span_lower != 0., (+order) / span_lower, 0.)
        coeff_upper = np.where(span_upper != 0., (-order) / span_upper, 0.)
    return ((coeff_lower, lower_spline), (coeff_upper, upper_spline))
constant[Differentiate a B-spline once, and return the resulting coefficients and Bspline objects.
This preserves the Bspline object nature of the data, enabling recursive implementation
of higher-order differentiation (see `diff`).
The value of the first derivative of `B` at a point `x` can be obtained as::
def diff1(B, x):
terms = B.__diff_internal()
return sum( ci*Bi(x) for ci,Bi in terms )
Returns:
tuple of tuples, where each item is (coefficient, Bspline object).
See:
`diff`: differentiation of any order >= 0
]
assert[compare[name[self].p greater[>] constant[0]]]
variable[t] assign[=] name[self].knot_vector
variable[p] assign[=] name[self].p
variable[Bi] assign[=] call[name[Bspline], parameter[call[name[t]][<ast.Slice object at 0x7da207f9b640>], binary_operation[name[p] - constant[1]]]]
variable[Bip1] assign[=] call[name[Bspline], parameter[call[name[t]][<ast.Slice object at 0x7da207f9a560>], binary_operation[name[p] - constant[1]]]]
variable[numer1] assign[=] <ast.UnaryOp object at 0x7da207f9a110>
variable[numer2] assign[=] <ast.UnaryOp object at 0x7da207f9a080>
variable[denom1] assign[=] binary_operation[call[name[t]][<ast.Slice object at 0x7da207f9a4d0>] - call[name[t]][<ast.Slice object at 0x7da207f9b3a0>]]
variable[denom2] assign[=] binary_operation[call[name[t]][<ast.Slice object at 0x7da207f9a980>] - call[name[t]][<ast.Slice object at 0x7da207f9a0e0>]]
with call[name[np].errstate, parameter[]] begin[:]
variable[ci] assign[=] call[name[np].where, parameter[compare[name[denom1] not_equal[!=] constant[0.0]], binary_operation[name[numer1] / name[denom1]], constant[0.0]]]
variable[cip1] assign[=] call[name[np].where, parameter[compare[name[denom2] not_equal[!=] constant[0.0]], binary_operation[name[numer2] / name[denom2]], constant[0.0]]]
return[tuple[[<ast.Tuple object at 0x7da207f9ad40>, <ast.Tuple object at 0x7da207f9a740>]]] | keyword[def] identifier[__diff_internal] ( identifier[self] ):
literal[string]
keyword[assert] identifier[self] . identifier[p] > literal[int] , literal[string]
identifier[t] = identifier[self] . identifier[knot_vector]
identifier[p] = identifier[self] . identifier[p]
identifier[Bi] = identifier[Bspline] ( identifier[t] [:- literal[int] ], identifier[p] - literal[int] )
identifier[Bip1] = identifier[Bspline] ( identifier[t] [ literal[int] :], identifier[p] - literal[int] )
identifier[numer1] =+ identifier[p]
identifier[numer2] =- identifier[p]
identifier[denom1] = identifier[t] [ identifier[p] :- literal[int] ]- identifier[t] [:-( identifier[p] + literal[int] )]
identifier[denom2] = identifier[t] [( identifier[p] + literal[int] ):]- identifier[t] [ literal[int] :- identifier[p] ]
keyword[with] identifier[np] . identifier[errstate] ( identifier[divide] = literal[string] , identifier[invalid] = literal[string] ):
identifier[ci] = identifier[np] . identifier[where] ( identifier[denom1] != literal[int] ,( identifier[numer1] / identifier[denom1] ), literal[int] )
identifier[cip1] = identifier[np] . identifier[where] ( identifier[denom2] != literal[int] ,( identifier[numer2] / identifier[denom2] ), literal[int] )
keyword[return] (( identifier[ci] , identifier[Bi] ),( identifier[cip1] , identifier[Bip1] )) | def __diff_internal(self):
"""Differentiate a B-spline once, and return the resulting coefficients and Bspline objects.
This preserves the Bspline object nature of the data, enabling recursive implementation
of higher-order differentiation (see `diff`).
The value of the first derivative of `B` at a point `x` can be obtained as::
def diff1(B, x):
terms = B.__diff_internal()
return sum( ci*Bi(x) for ci,Bi in terms )
Returns:
tuple of tuples, where each item is (coefficient, Bspline object).
See:
`diff`: differentiation of any order >= 0
"""
assert self.p > 0, 'order of Bspline must be > 0' # we already handle the other case in diff()
# https://www.cs.mtu.edu/~shene/COURSES/cs3621/NOTES/spline/B-spline/bspline-derv.html
#
t = self.knot_vector
p = self.p
Bi = Bspline(t[:-1], p - 1)
Bip1 = Bspline(t[1:], p - 1)
numer1 = +p
numer2 = -p
denom1 = t[p:-1] - t[:-(p + 1)]
denom2 = t[p + 1:] - t[1:-p]
with np.errstate(divide='ignore', invalid='ignore'):
ci = np.where(denom1 != 0.0, numer1 / denom1, 0.0)
cip1 = np.where(denom2 != 0.0, numer2 / denom2, 0.0) # depends on [control=['with'], data=[]]
return ((ci, Bi), (cip1, Bip1)) |
def json(self, data):
    """Serialize *data* to JSON and use it as the POST/PUT body.

    Also sets the ``Content-Type: application/json`` header. A ``None``
    value leaves the request untouched.
    """
    if data is None:
        return
    self._body = json.dumps(data)
    self.add_header('Content-Type', 'application/json')
constant[Set the POST/PUT body content in JSON format for this request.]
if compare[name[data] is_not constant[None]] begin[:]
name[self]._body assign[=] call[name[json].dumps, parameter[name[data]]]
call[name[self].add_header, parameter[constant[Content-Type], constant[application/json]]] | keyword[def] identifier[json] ( identifier[self] , identifier[data] ):
literal[string]
keyword[if] identifier[data] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_body] = identifier[json] . identifier[dumps] ( identifier[data] )
identifier[self] . identifier[add_header] ( literal[string] , literal[string] ) | def json(self, data):
"""Set the POST/PUT body content in JSON format for this request."""
if data is not None:
self._body = json.dumps(data)
self.add_header('Content-Type', 'application/json') # depends on [control=['if'], data=['data']] |
def set_user_agent(self, name, http):
    '''Sets the application name. LibVLC passes this as the user agent string
    when a protocol requires it.
    @param name: human-readable application name, e.g. "FooBar player 1.2.3".
    @param http: HTTP User Agent, e.g. "FooBar/1.2.3 Python/2.6.0".
    @version: LibVLC 1.1.1 or later.
    '''
    # The C API expects byte strings; encode both arguments first.
    encoded_name = str_to_bytes(name)
    encoded_http = str_to_bytes(http)
    return libvlc_set_user_agent(self, encoded_name, encoded_http)
constant[Sets the application name. LibVLC passes this as the user agent string
when a protocol requires it.
@param name: human-readable application name, e.g. "FooBar player 1.2.3".
@param http: HTTP User Agent, e.g. "FooBar/1.2.3 Python/2.6.0".
@version: LibVLC 1.1.1 or later.
]
return[call[name[libvlc_set_user_agent], parameter[name[self], call[name[str_to_bytes], parameter[name[name]]], call[name[str_to_bytes], parameter[name[http]]]]]] | keyword[def] identifier[set_user_agent] ( identifier[self] , identifier[name] , identifier[http] ):
literal[string]
keyword[return] identifier[libvlc_set_user_agent] ( identifier[self] , identifier[str_to_bytes] ( identifier[name] ), identifier[str_to_bytes] ( identifier[http] )) | def set_user_agent(self, name, http):
"""Sets the application name. LibVLC passes this as the user agent string
when a protocol requires it.
@param name: human-readable application name, e.g. "FooBar player 1.2.3".
@param http: HTTP User Agent, e.g. "FooBar/1.2.3 Python/2.6.0".
@version: LibVLC 1.1.1 or later.
"""
return libvlc_set_user_agent(self, str_to_bytes(name), str_to_bytes(http)) |
def load_tag(corpus, path):
    """Load all speakers below *path* and register a tag subview.

    Every gender folder under ``<path>/by_book`` is walked; utterances
    are collected per speaker (or per book for 'mix' folders, which
    have no speaker level) and the resulting utterance-idx set becomes
    a subview named after the tag folder.
    """
    tag_name = os.path.basename(path)
    book_root = os.path.join(path, 'by_book')
    collected_utt_ids = []
    for gender_dir in MailabsReader.get_folders(book_root):
        if os.path.basename(gender_dir) == 'mix':
            # 'mix' folders contain no speaker subfolders: every
            # utterance is handled as if it came from a distinct issuer.
            collected_utt_ids.extend(
                MailabsReader.load_books_of_speaker(corpus, gender_dir, None))
            continue
        for speaker_dir in MailabsReader.get_folders(gender_dir):
            issuer = MailabsReader.load_speaker(corpus, speaker_dir)
            collected_utt_ids.extend(
                MailabsReader.load_books_of_speaker(corpus, speaker_dir, issuer))
    idx_filter = subset.MatchingUtteranceIdxFilter(
        utterance_idxs=set(collected_utt_ids)
    )
    subview = subset.Subview(corpus, filter_criteria=[idx_filter])
    corpus.import_subview(tag_name, subview)
constant[
Iterate over all speakers on load them.
Collect all utterance-idx and create a subset of them.
]
variable[tag_idx] assign[=] call[name[os].path.basename, parameter[name[path]]]
variable[data_path] assign[=] call[name[os].path.join, parameter[name[path], constant[by_book]]]
variable[tag_utt_ids] assign[=] list[[]]
for taget[name[gender_path]] in starred[call[name[MailabsReader].get_folders, parameter[name[data_path]]]] begin[:]
if compare[call[name[os].path.basename, parameter[name[gender_path]]] equal[==] constant[mix]] begin[:]
variable[utt_ids] assign[=] call[name[MailabsReader].load_books_of_speaker, parameter[name[corpus], name[gender_path], constant[None]]]
call[name[tag_utt_ids].extend, parameter[name[utt_ids]]]
variable[filter] assign[=] call[name[subset].MatchingUtteranceIdxFilter, parameter[]]
variable[subview] assign[=] call[name[subset].Subview, parameter[name[corpus]]]
call[name[corpus].import_subview, parameter[name[tag_idx], name[subview]]] | keyword[def] identifier[load_tag] ( identifier[corpus] , identifier[path] ):
literal[string]
identifier[tag_idx] = identifier[os] . identifier[path] . identifier[basename] ( identifier[path] )
identifier[data_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] )
identifier[tag_utt_ids] =[]
keyword[for] identifier[gender_path] keyword[in] identifier[MailabsReader] . identifier[get_folders] ( identifier[data_path] ):
keyword[if] identifier[os] . identifier[path] . identifier[basename] ( identifier[gender_path] )== literal[string] :
identifier[utt_ids] = identifier[MailabsReader] . identifier[load_books_of_speaker] ( identifier[corpus] ,
identifier[gender_path] ,
keyword[None] )
identifier[tag_utt_ids] . identifier[extend] ( identifier[utt_ids] )
keyword[else] :
keyword[for] identifier[speaker_path] keyword[in] identifier[MailabsReader] . identifier[get_folders] ( identifier[gender_path] ):
identifier[speaker] = identifier[MailabsReader] . identifier[load_speaker] ( identifier[corpus] , identifier[speaker_path] )
identifier[utt_ids] = identifier[MailabsReader] . identifier[load_books_of_speaker] ( identifier[corpus] ,
identifier[speaker_path] ,
identifier[speaker] )
identifier[tag_utt_ids] . identifier[extend] ( identifier[utt_ids] )
identifier[filter] = identifier[subset] . identifier[MatchingUtteranceIdxFilter] (
identifier[utterance_idxs] = identifier[set] ( identifier[tag_utt_ids] )
)
identifier[subview] = identifier[subset] . identifier[Subview] ( identifier[corpus] , identifier[filter_criteria] =[ identifier[filter] ])
identifier[corpus] . identifier[import_subview] ( identifier[tag_idx] , identifier[subview] ) | def load_tag(corpus, path):
"""
Iterate over all speakers on load them.
Collect all utterance-idx and create a subset of them.
"""
tag_idx = os.path.basename(path)
data_path = os.path.join(path, 'by_book')
tag_utt_ids = []
for gender_path in MailabsReader.get_folders(data_path):
# IN MIX FOLDERS THERE ARE NO SPEAKERS
# HANDLE EVERY UTT AS DIFFERENT ISSUER
if os.path.basename(gender_path) == 'mix':
utt_ids = MailabsReader.load_books_of_speaker(corpus, gender_path, None)
tag_utt_ids.extend(utt_ids) # depends on [control=['if'], data=[]]
else:
for speaker_path in MailabsReader.get_folders(gender_path):
speaker = MailabsReader.load_speaker(corpus, speaker_path)
utt_ids = MailabsReader.load_books_of_speaker(corpus, speaker_path, speaker)
tag_utt_ids.extend(utt_ids) # depends on [control=['for'], data=['speaker_path']] # depends on [control=['for'], data=['gender_path']]
filter = subset.MatchingUtteranceIdxFilter(utterance_idxs=set(tag_utt_ids))
subview = subset.Subview(corpus, filter_criteria=[filter])
corpus.import_subview(tag_idx, subview) |
def readrows(self):
    """Read and yield rows from all configured log files.

    Iterates over ``self._files``, transparently decompressing any
    ``.gz`` file into a temporary file first, then delegates row
    parsing to a ``BroLogReader`` over the (possibly decompressed)
    file path.

    Yields:
        Parsed log rows, one at a time, in file order.
    """
    # For each file (may be just one) create a BroLogReader and use it
    for self._filepath in self._files:
        # Check if the file is gzipped; if so, decompress to a temp file.
        tmp = None
        if self._filepath.endswith('.gz'):
            tmp = tempfile.NamedTemporaryFile(delete=False)
            # Close our handle right away: we only need the path, and an
            # open handle leaks an fd (and blocks reopening on Windows).
            tmp.close()
            with gzip.open(self._filepath, 'rb') as f_in, open(tmp.name, 'wb') as f_out:
                shutil.copyfileobj(f_in, f_out)
            # Point the reader at the decompressed temp file
            self._filepath = tmp.name

        # Create a BroLogReader and stream its rows through
        reader = bro_log_reader.BroLogReader(self._filepath)
        for row in reader.readrows():
            yield row

        # Clean up THIS iteration's temp file. (The original code did the
        # cleanup after the loop, so with multiple .gz inputs only the
        # last temp file was removed and the earlier ones leaked.)
        if tmp:
            try:
                os.remove(tmp.name)
                print('Removed temporary file {:s}...'.format(tmp.name))
            except OSError:
                # Best-effort cleanup; a stale temp file is not fatal.
                pass
constant[The readrows method reads simply 'combines' the rows of
multiple files OR gunzips the file and then reads the rows
]
for taget[name[self]._filepath] in starred[name[self]._files] begin[:]
variable[tmp] assign[=] constant[None]
if call[name[self]._filepath.endswith, parameter[constant[.gz]]] begin[:]
variable[tmp] assign[=] call[name[tempfile].NamedTemporaryFile, parameter[]]
with call[name[gzip].open, parameter[name[self]._filepath, constant[rb]]] begin[:]
call[name[shutil].copyfileobj, parameter[name[f_in], name[f_out]]]
name[self]._filepath assign[=] name[tmp].name
variable[reader] assign[=] call[name[bro_log_reader].BroLogReader, parameter[name[self]._filepath]]
for taget[name[row]] in starred[call[name[reader].readrows, parameter[]]] begin[:]
<ast.Yield object at 0x7da18c4cc9d0>
<ast.Try object at 0x7da18c4cd1b0> | keyword[def] identifier[readrows] ( identifier[self] ):
literal[string]
keyword[for] identifier[self] . identifier[_filepath] keyword[in] identifier[self] . identifier[_files] :
identifier[tmp] = keyword[None]
keyword[if] identifier[self] . identifier[_filepath] . identifier[endswith] ( literal[string] ):
identifier[tmp] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[delete] = keyword[False] )
keyword[with] identifier[gzip] . identifier[open] ( identifier[self] . identifier[_filepath] , literal[string] ) keyword[as] identifier[f_in] , identifier[open] ( identifier[tmp] . identifier[name] , literal[string] ) keyword[as] identifier[f_out] :
identifier[shutil] . identifier[copyfileobj] ( identifier[f_in] , identifier[f_out] )
identifier[self] . identifier[_filepath] = identifier[tmp] . identifier[name]
identifier[reader] = identifier[bro_log_reader] . identifier[BroLogReader] ( identifier[self] . identifier[_filepath] )
keyword[for] identifier[row] keyword[in] identifier[reader] . identifier[readrows] ():
keyword[yield] identifier[row]
keyword[try] :
keyword[if] identifier[tmp] :
identifier[os] . identifier[remove] ( identifier[tmp] . identifier[name] )
identifier[print] ( literal[string] . identifier[format] ( identifier[tmp] . identifier[name] ))
keyword[except] identifier[IOError] :
keyword[pass] | def readrows(self):
"""The readrows method reads simply 'combines' the rows of
multiple files OR gunzips the file and then reads the rows
"""
# For each file (may be just one) create a BroLogReader and use it
for self._filepath in self._files:
# Check if the file is zipped
tmp = None
if self._filepath.endswith('.gz'):
tmp = tempfile.NamedTemporaryFile(delete=False)
with gzip.open(self._filepath, 'rb') as f_in, open(tmp.name, 'wb') as f_out:
shutil.copyfileobj(f_in, f_out) # depends on [control=['with'], data=['f_in']]
# Set the file path to the new temp file
self._filepath = tmp.name # depends on [control=['if'], data=[]]
# Create a BroLogReader
reader = bro_log_reader.BroLogReader(self._filepath)
for row in reader.readrows():
yield row # depends on [control=['for'], data=['row']]
# Clean up any temp files
try:
if tmp:
os.remove(tmp.name)
print('Removed temporary file {:s}...'.format(tmp.name)) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except IOError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] |
def df(self, qname_predicates: bool = False, keep_variable_type: bool = True) -> pd.DataFrame:
    """Multi-functional DataFrame accessor with settings.

    Args:
        qname_predicates: If True, rename each column listed in
            ``self.columns`` to its qualified name via ``self.g.qname``.
        keep_variable_type: If False, values are intended to be converted
            to strings (watch out for lists) — not yet implemented.

    Returns:
        pd.DataFrame: A copy of ``self.df`` with the requested
        transformations applied; the underlying frame is never mutated.
    """
    local_df = self.df.copy()
    if qname_predicates:
        # Build the full column mapping once and rename in a single pass.
        # (The original called rename() per column and discarded the
        # returned frame — rename is not in-place — and also omitted
        # ``columns=``, which would have targeted the index instead.)
        mapping = {col: self.g.qname(col) for col in self.columns}
        local_df = local_df.rename(columns=mapping)
    if not keep_variable_type:
        # TODO: convert all values to strings, watch out for lists
        pass
    return local_df
constant[ Multi funcitonal DataFrame with settings ]
variable[local_df] assign[=] call[name[self].df.copy, parameter[]]
if name[qname_predicates] begin[:]
for taget[name[col]] in starred[name[self].columns] begin[:]
call[name[local_df].rename, parameter[dictionary[[<ast.Name object at 0x7da1b1a44dc0>], [<ast.Call object at 0x7da1b1a44850>]]]]
if <ast.UnaryOp object at 0x7da1b1a453c0> begin[:]
pass
return[name[local_df]] | keyword[def] identifier[df] ( identifier[self] , identifier[qname_predicates] : identifier[bool] = keyword[False] , identifier[keep_variable_type] : identifier[bool] = keyword[True] )-> identifier[pd] . identifier[DataFrame] :
literal[string]
identifier[local_df] = identifier[self] . identifier[df] . identifier[copy] ()
keyword[if] identifier[qname_predicates] :
keyword[for] identifier[col] keyword[in] identifier[self] . identifier[columns] :
identifier[local_df] . identifier[rename] ({ identifier[col] : identifier[self] . identifier[g] . identifier[qname] ( identifier[col] )})
keyword[if] keyword[not] identifier[keep_variable_type] :
keyword[pass]
keyword[return] identifier[local_df] | def df(self, qname_predicates: bool=False, keep_variable_type: bool=True) -> pd.DataFrame:
""" Multi funcitonal DataFrame with settings """
local_df = self.df.copy()
if qname_predicates:
for col in self.columns:
local_df.rename({col: self.g.qname(col)}) # depends on [control=['for'], data=['col']] # depends on [control=['if'], data=[]]
if not keep_variable_type:
pass # depends on [control=['if'], data=[]]
# convert all to strings, watch out for lists
return local_df |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.