code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
async def clean(self):
""" Close all the running tasks watching for a container timeout. All references to
containers are removed: any attempt to was_killed after a call to clean() will return None.
"""
for x in self._running_asyncio_tasks:
x.cancel()
self._container_had_error = set()
self._watching = set()
self._running_asyncio_tasks = set() | <ast.AsyncFunctionDef object at 0x7da20e9b34c0> | keyword[async] keyword[def] identifier[clean] ( identifier[self] ):
literal[string]
keyword[for] identifier[x] keyword[in] identifier[self] . identifier[_running_asyncio_tasks] :
identifier[x] . identifier[cancel] ()
identifier[self] . identifier[_container_had_error] = identifier[set] ()
identifier[self] . identifier[_watching] = identifier[set] ()
identifier[self] . identifier[_running_asyncio_tasks] = identifier[set] () | async def clean(self):
""" Close all the running tasks watching for a container timeout. All references to
containers are removed: any attempt to was_killed after a call to clean() will return None.
"""
for x in self._running_asyncio_tasks:
x.cancel() # depends on [control=['for'], data=['x']]
self._container_had_error = set()
self._watching = set()
self._running_asyncio_tasks = set() |
def mv_data_dir(target):
"""
Move data_dir to {target} location, refineable in case data_dir is a mounted volume or object storage and needs special treatments
:return:
"""
from django_productline.context import PRODUCT_CONTEXT
os.rename(PRODUCT_CONTEXT.DATA_DIR, target) | def function[mv_data_dir, parameter[target]]:
constant[
Move data_dir to {target} location, refineable in case data_dir is a mounted volume or object storage and needs special treatments
:return:
]
from relative_module[django_productline.context] import module[PRODUCT_CONTEXT]
call[name[os].rename, parameter[name[PRODUCT_CONTEXT].DATA_DIR, name[target]]] | keyword[def] identifier[mv_data_dir] ( identifier[target] ):
literal[string]
keyword[from] identifier[django_productline] . identifier[context] keyword[import] identifier[PRODUCT_CONTEXT]
identifier[os] . identifier[rename] ( identifier[PRODUCT_CONTEXT] . identifier[DATA_DIR] , identifier[target] ) | def mv_data_dir(target):
"""
Move data_dir to {target} location, refineable in case data_dir is a mounted volume or object storage and needs special treatments
:return:
"""
from django_productline.context import PRODUCT_CONTEXT
os.rename(PRODUCT_CONTEXT.DATA_DIR, target) |
def get_next(self, query, **options):
"""Returns a tuple containing the next page of events and a sync token for the given query (and optional 'sync' token)"""
iterator = EventsPageIterator(self.client, '/events', query, options)
result = iterator.next()
return (result, iterator.sync) | def function[get_next, parameter[self, query]]:
constant[Returns a tuple containing the next page of events and a sync token for the given query (and optional 'sync' token)]
variable[iterator] assign[=] call[name[EventsPageIterator], parameter[name[self].client, constant[/events], name[query], name[options]]]
variable[result] assign[=] call[name[iterator].next, parameter[]]
return[tuple[[<ast.Name object at 0x7da18eb54fd0>, <ast.Attribute object at 0x7da18eb57310>]]] | keyword[def] identifier[get_next] ( identifier[self] , identifier[query] ,** identifier[options] ):
literal[string]
identifier[iterator] = identifier[EventsPageIterator] ( identifier[self] . identifier[client] , literal[string] , identifier[query] , identifier[options] )
identifier[result] = identifier[iterator] . identifier[next] ()
keyword[return] ( identifier[result] , identifier[iterator] . identifier[sync] ) | def get_next(self, query, **options):
"""Returns a tuple containing the next page of events and a sync token for the given query (and optional 'sync' token)"""
iterator = EventsPageIterator(self.client, '/events', query, options)
result = iterator.next()
return (result, iterator.sync) |
def cron(self, pattern, name, *args, **kwargs):
"""
A function to setup a RQ job as a cronjob::
@rq.job('low', timeout=60)
def add(x, y):
return x + y
add.cron('* * * * *', 'add-some-numbers', 1, 2, timeout=10)
:param \\*args: The positional arguments to pass to the queued job.
:param \\*\\*kwargs: The keyword arguments to pass to the queued job.
:param pattern: A Crontab pattern.
:type pattern: str
:param name: The name of the cronjob.
:type name: str
:param queue: Name of the queue to queue in, defaults to
queue of of job or :attr:`~flask_rq2.RQ.default_queue`.
:type queue: str
:param timeout: The job timeout in seconds.
If not provided uses the job's timeout or
:attr:`~flask_rq2.RQ.default_timeout`.
:type timeout: int
:param description: Description of the job.
:type description: str
:param repeat: The number of times the job needs to be repeatedly
queued via the cronjob. Take care only using this for
cronjob that don't already repeat themselves natively
due to their crontab.
:type repeat: int
:return: An RQ job instance.
:rtype: ~flask_rq2.job.FlaskJob
"""
queue_name = kwargs.pop('queue', self.queue_name)
timeout = kwargs.pop('timeout', self.timeout)
description = kwargs.pop('description', None)
repeat = kwargs.pop('repeat', None)
return self.rq.get_scheduler().cron(
pattern,
self.wrapped,
args=args,
kwargs=kwargs,
repeat=repeat,
queue_name=queue_name,
id='cron-%s' % name,
timeout=timeout,
description=description,
) | def function[cron, parameter[self, pattern, name]]:
constant[
A function to setup a RQ job as a cronjob::
@rq.job('low', timeout=60)
def add(x, y):
return x + y
add.cron('* * * * *', 'add-some-numbers', 1, 2, timeout=10)
:param \*args: The positional arguments to pass to the queued job.
:param \*\*kwargs: The keyword arguments to pass to the queued job.
:param pattern: A Crontab pattern.
:type pattern: str
:param name: The name of the cronjob.
:type name: str
:param queue: Name of the queue to queue in, defaults to
queue of of job or :attr:`~flask_rq2.RQ.default_queue`.
:type queue: str
:param timeout: The job timeout in seconds.
If not provided uses the job's timeout or
:attr:`~flask_rq2.RQ.default_timeout`.
:type timeout: int
:param description: Description of the job.
:type description: str
:param repeat: The number of times the job needs to be repeatedly
queued via the cronjob. Take care only using this for
cronjob that don't already repeat themselves natively
due to their crontab.
:type repeat: int
:return: An RQ job instance.
:rtype: ~flask_rq2.job.FlaskJob
]
variable[queue_name] assign[=] call[name[kwargs].pop, parameter[constant[queue], name[self].queue_name]]
variable[timeout] assign[=] call[name[kwargs].pop, parameter[constant[timeout], name[self].timeout]]
variable[description] assign[=] call[name[kwargs].pop, parameter[constant[description], constant[None]]]
variable[repeat] assign[=] call[name[kwargs].pop, parameter[constant[repeat], constant[None]]]
return[call[call[name[self].rq.get_scheduler, parameter[]].cron, parameter[name[pattern], name[self].wrapped]]] | keyword[def] identifier[cron] ( identifier[self] , identifier[pattern] , identifier[name] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[queue_name] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[queue_name] )
identifier[timeout] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[timeout] )
identifier[description] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
identifier[repeat] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[return] identifier[self] . identifier[rq] . identifier[get_scheduler] (). identifier[cron] (
identifier[pattern] ,
identifier[self] . identifier[wrapped] ,
identifier[args] = identifier[args] ,
identifier[kwargs] = identifier[kwargs] ,
identifier[repeat] = identifier[repeat] ,
identifier[queue_name] = identifier[queue_name] ,
identifier[id] = literal[string] % identifier[name] ,
identifier[timeout] = identifier[timeout] ,
identifier[description] = identifier[description] ,
) | def cron(self, pattern, name, *args, **kwargs):
"""
A function to setup a RQ job as a cronjob::
@rq.job('low', timeout=60)
def add(x, y):
return x + y
add.cron('* * * * *', 'add-some-numbers', 1, 2, timeout=10)
:param \\*args: The positional arguments to pass to the queued job.
:param \\*\\*kwargs: The keyword arguments to pass to the queued job.
:param pattern: A Crontab pattern.
:type pattern: str
:param name: The name of the cronjob.
:type name: str
:param queue: Name of the queue to queue in, defaults to
queue of of job or :attr:`~flask_rq2.RQ.default_queue`.
:type queue: str
:param timeout: The job timeout in seconds.
If not provided uses the job's timeout or
:attr:`~flask_rq2.RQ.default_timeout`.
:type timeout: int
:param description: Description of the job.
:type description: str
:param repeat: The number of times the job needs to be repeatedly
queued via the cronjob. Take care only using this for
cronjob that don't already repeat themselves natively
due to their crontab.
:type repeat: int
:return: An RQ job instance.
:rtype: ~flask_rq2.job.FlaskJob
"""
queue_name = kwargs.pop('queue', self.queue_name)
timeout = kwargs.pop('timeout', self.timeout)
description = kwargs.pop('description', None)
repeat = kwargs.pop('repeat', None)
return self.rq.get_scheduler().cron(pattern, self.wrapped, args=args, kwargs=kwargs, repeat=repeat, queue_name=queue_name, id='cron-%s' % name, timeout=timeout, description=description) |
def _perform_request(self, request_type, resource, **kwargs):
'''
Utility method that performs all requests.
'''
request_type_methods = set(["get", "post", "put", "delete"])
if request_type not in request_type_methods:
raise Exception("Unknown request type. Supported request types are"
": {0}".format(", ".join(request_type_methods)))
uri = "{0}{1}{2}".format(self.uri_prefix, self.domain, resource)
# set a timeout, just to be safe
kwargs["timeout"] = self.timeout
response = getattr(self.session, request_type)(uri, **kwargs)
# handle errors
if response.status_code not in (200, 202):
_raise_for_status(response)
# when responses have no content body (ie. delete, set_permission),
# simply return the whole response
if not response.text:
return response
# for other request types, return most useful data
content_type = response.headers.get('content-type').strip().lower()
if re.match(r'application\/json', content_type):
return response.json()
elif re.match(r'text\/csv', content_type):
csv_stream = StringIO(response.text)
return [line for line in csv.reader(csv_stream)]
elif re.match(r'application\/rdf\+xml', content_type):
return response.content
elif re.match(r'text\/plain', content_type):
try:
return json.loads(response.text)
except ValueError:
return response.text
else:
raise Exception("Unknown response format: {0}"
.format(content_type)) | def function[_perform_request, parameter[self, request_type, resource]]:
constant[
Utility method that performs all requests.
]
variable[request_type_methods] assign[=] call[name[set], parameter[list[[<ast.Constant object at 0x7da204621fc0>, <ast.Constant object at 0x7da204623010>, <ast.Constant object at 0x7da204620760>, <ast.Constant object at 0x7da204623790>]]]]
if compare[name[request_type] <ast.NotIn object at 0x7da2590d7190> name[request_type_methods]] begin[:]
<ast.Raise object at 0x7da204620b80>
variable[uri] assign[=] call[constant[{0}{1}{2}].format, parameter[name[self].uri_prefix, name[self].domain, name[resource]]]
call[name[kwargs]][constant[timeout]] assign[=] name[self].timeout
variable[response] assign[=] call[call[name[getattr], parameter[name[self].session, name[request_type]]], parameter[name[uri]]]
if compare[name[response].status_code <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da2046203a0>, <ast.Constant object at 0x7da204620790>]]] begin[:]
call[name[_raise_for_status], parameter[name[response]]]
if <ast.UnaryOp object at 0x7da204622350> begin[:]
return[name[response]]
variable[content_type] assign[=] call[call[call[name[response].headers.get, parameter[constant[content-type]]].strip, parameter[]].lower, parameter[]]
if call[name[re].match, parameter[constant[application\/json], name[content_type]]] begin[:]
return[call[name[response].json, parameter[]]] | keyword[def] identifier[_perform_request] ( identifier[self] , identifier[request_type] , identifier[resource] ,** identifier[kwargs] ):
literal[string]
identifier[request_type_methods] = identifier[set] ([ literal[string] , literal[string] , literal[string] , literal[string] ])
keyword[if] identifier[request_type] keyword[not] keyword[in] identifier[request_type_methods] :
keyword[raise] identifier[Exception] ( literal[string]
literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[request_type_methods] )))
identifier[uri] = literal[string] . identifier[format] ( identifier[self] . identifier[uri_prefix] , identifier[self] . identifier[domain] , identifier[resource] )
identifier[kwargs] [ literal[string] ]= identifier[self] . identifier[timeout]
identifier[response] = identifier[getattr] ( identifier[self] . identifier[session] , identifier[request_type] )( identifier[uri] ,** identifier[kwargs] )
keyword[if] identifier[response] . identifier[status_code] keyword[not] keyword[in] ( literal[int] , literal[int] ):
identifier[_raise_for_status] ( identifier[response] )
keyword[if] keyword[not] identifier[response] . identifier[text] :
keyword[return] identifier[response]
identifier[content_type] = identifier[response] . identifier[headers] . identifier[get] ( literal[string] ). identifier[strip] (). identifier[lower] ()
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[content_type] ):
keyword[return] identifier[response] . identifier[json] ()
keyword[elif] identifier[re] . identifier[match] ( literal[string] , identifier[content_type] ):
identifier[csv_stream] = identifier[StringIO] ( identifier[response] . identifier[text] )
keyword[return] [ identifier[line] keyword[for] identifier[line] keyword[in] identifier[csv] . identifier[reader] ( identifier[csv_stream] )]
keyword[elif] identifier[re] . identifier[match] ( literal[string] , identifier[content_type] ):
keyword[return] identifier[response] . identifier[content]
keyword[elif] identifier[re] . identifier[match] ( literal[string] , identifier[content_type] ):
keyword[try] :
keyword[return] identifier[json] . identifier[loads] ( identifier[response] . identifier[text] )
keyword[except] identifier[ValueError] :
keyword[return] identifier[response] . identifier[text]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string]
. identifier[format] ( identifier[content_type] )) | def _perform_request(self, request_type, resource, **kwargs):
"""
Utility method that performs all requests.
"""
request_type_methods = set(['get', 'post', 'put', 'delete'])
if request_type not in request_type_methods:
raise Exception('Unknown request type. Supported request types are: {0}'.format(', '.join(request_type_methods))) # depends on [control=['if'], data=['request_type_methods']]
uri = '{0}{1}{2}'.format(self.uri_prefix, self.domain, resource)
# set a timeout, just to be safe
kwargs['timeout'] = self.timeout
response = getattr(self.session, request_type)(uri, **kwargs)
# handle errors
if response.status_code not in (200, 202):
_raise_for_status(response) # depends on [control=['if'], data=[]]
# when responses have no content body (ie. delete, set_permission),
# simply return the whole response
if not response.text:
return response # depends on [control=['if'], data=[]]
# for other request types, return most useful data
content_type = response.headers.get('content-type').strip().lower()
if re.match('application\\/json', content_type):
return response.json() # depends on [control=['if'], data=[]]
elif re.match('text\\/csv', content_type):
csv_stream = StringIO(response.text)
return [line for line in csv.reader(csv_stream)] # depends on [control=['if'], data=[]]
elif re.match('application\\/rdf\\+xml', content_type):
return response.content # depends on [control=['if'], data=[]]
elif re.match('text\\/plain', content_type):
try:
return json.loads(response.text) # depends on [control=['try'], data=[]]
except ValueError:
return response.text # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
raise Exception('Unknown response format: {0}'.format(content_type)) |
def update_hash(a_hash, mv):
"""Adds ``mv`` to ``a_hash``
Args:
a_hash (`Hash`): the secure hash, e.g created by hashlib.md5
mv (:class:`MetricValue`): the instance to add to the hash
"""
if mv.labels:
signing.add_dict_to_hash(a_hash, encoding.MessageToPyValue(mv.labels))
money_value = mv.get_assigned_value(u'moneyValue')
if money_value is not None:
a_hash.update(b'\x00')
a_hash.update(money_value.currencyCode.encode('utf-8')) | def function[update_hash, parameter[a_hash, mv]]:
constant[Adds ``mv`` to ``a_hash``
Args:
a_hash (`Hash`): the secure hash, e.g created by hashlib.md5
mv (:class:`MetricValue`): the instance to add to the hash
]
if name[mv].labels begin[:]
call[name[signing].add_dict_to_hash, parameter[name[a_hash], call[name[encoding].MessageToPyValue, parameter[name[mv].labels]]]]
variable[money_value] assign[=] call[name[mv].get_assigned_value, parameter[constant[moneyValue]]]
if compare[name[money_value] is_not constant[None]] begin[:]
call[name[a_hash].update, parameter[constant[b'\x00']]]
call[name[a_hash].update, parameter[call[name[money_value].currencyCode.encode, parameter[constant[utf-8]]]]] | keyword[def] identifier[update_hash] ( identifier[a_hash] , identifier[mv] ):
literal[string]
keyword[if] identifier[mv] . identifier[labels] :
identifier[signing] . identifier[add_dict_to_hash] ( identifier[a_hash] , identifier[encoding] . identifier[MessageToPyValue] ( identifier[mv] . identifier[labels] ))
identifier[money_value] = identifier[mv] . identifier[get_assigned_value] ( literal[string] )
keyword[if] identifier[money_value] keyword[is] keyword[not] keyword[None] :
identifier[a_hash] . identifier[update] ( literal[string] )
identifier[a_hash] . identifier[update] ( identifier[money_value] . identifier[currencyCode] . identifier[encode] ( literal[string] )) | def update_hash(a_hash, mv):
"""Adds ``mv`` to ``a_hash``
Args:
a_hash (`Hash`): the secure hash, e.g created by hashlib.md5
mv (:class:`MetricValue`): the instance to add to the hash
"""
if mv.labels:
signing.add_dict_to_hash(a_hash, encoding.MessageToPyValue(mv.labels)) # depends on [control=['if'], data=[]]
money_value = mv.get_assigned_value(u'moneyValue')
if money_value is not None:
a_hash.update(b'\x00')
a_hash.update(money_value.currencyCode.encode('utf-8')) # depends on [control=['if'], data=['money_value']] |
def startThread(self):
"""Spawns new NSThread to handle notifications."""
if self._thread is not None:
return
self._thread = NSThread.alloc().initWithTarget_selector_object_(self, 'runPowerNotificationsThread', None)
self._thread.start() | def function[startThread, parameter[self]]:
constant[Spawns new NSThread to handle notifications.]
if compare[name[self]._thread is_not constant[None]] begin[:]
return[None]
name[self]._thread assign[=] call[call[name[NSThread].alloc, parameter[]].initWithTarget_selector_object_, parameter[name[self], constant[runPowerNotificationsThread], constant[None]]]
call[name[self]._thread.start, parameter[]] | keyword[def] identifier[startThread] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_thread] keyword[is] keyword[not] keyword[None] :
keyword[return]
identifier[self] . identifier[_thread] = identifier[NSThread] . identifier[alloc] (). identifier[initWithTarget_selector_object_] ( identifier[self] , literal[string] , keyword[None] )
identifier[self] . identifier[_thread] . identifier[start] () | def startThread(self):
"""Spawns new NSThread to handle notifications."""
if self._thread is not None:
return # depends on [control=['if'], data=[]]
self._thread = NSThread.alloc().initWithTarget_selector_object_(self, 'runPowerNotificationsThread', None)
self._thread.start() |
def parse(text: str, style: Style = Style.auto) -> Docstring:
"""
Parse the docstring into its components.
:param text: docstring text to parse
:param style: docstring style
:returns: parsed docstring representation
"""
if style != Style.auto:
return _styles[style](text)
rets = []
for parse_ in _styles.values():
try:
rets.append(parse_(text))
except ParseError as e:
exc = e
if not rets:
raise exc
return sorted(rets, key=lambda d: len(d.meta), reverse=True)[0] | def function[parse, parameter[text, style]]:
constant[
Parse the docstring into its components.
:param text: docstring text to parse
:param style: docstring style
:returns: parsed docstring representation
]
if compare[name[style] not_equal[!=] name[Style].auto] begin[:]
return[call[call[name[_styles]][name[style]], parameter[name[text]]]]
variable[rets] assign[=] list[[]]
for taget[name[parse_]] in starred[call[name[_styles].values, parameter[]]] begin[:]
<ast.Try object at 0x7da1b0ed2500>
if <ast.UnaryOp object at 0x7da1b0e17fd0> begin[:]
<ast.Raise object at 0x7da1b0e14280>
return[call[call[name[sorted], parameter[name[rets]]]][constant[0]]] | keyword[def] identifier[parse] ( identifier[text] : identifier[str] , identifier[style] : identifier[Style] = identifier[Style] . identifier[auto] )-> identifier[Docstring] :
literal[string]
keyword[if] identifier[style] != identifier[Style] . identifier[auto] :
keyword[return] identifier[_styles] [ identifier[style] ]( identifier[text] )
identifier[rets] =[]
keyword[for] identifier[parse_] keyword[in] identifier[_styles] . identifier[values] ():
keyword[try] :
identifier[rets] . identifier[append] ( identifier[parse_] ( identifier[text] ))
keyword[except] identifier[ParseError] keyword[as] identifier[e] :
identifier[exc] = identifier[e]
keyword[if] keyword[not] identifier[rets] :
keyword[raise] identifier[exc]
keyword[return] identifier[sorted] ( identifier[rets] , identifier[key] = keyword[lambda] identifier[d] : identifier[len] ( identifier[d] . identifier[meta] ), identifier[reverse] = keyword[True] )[ literal[int] ] | def parse(text: str, style: Style=Style.auto) -> Docstring:
"""
Parse the docstring into its components.
:param text: docstring text to parse
:param style: docstring style
:returns: parsed docstring representation
"""
if style != Style.auto:
return _styles[style](text) # depends on [control=['if'], data=['style']]
rets = []
for parse_ in _styles.values():
try:
rets.append(parse_(text)) # depends on [control=['try'], data=[]]
except ParseError as e:
exc = e # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['parse_']]
if not rets:
raise exc # depends on [control=['if'], data=[]]
return sorted(rets, key=lambda d: len(d.meta), reverse=True)[0] |
def jvm_dependency_map(self):
"""A map of each JvmTarget in the context to the set of JvmTargets it depends on "directly".
"Directly" is in quotes here because it isn't quite the same as its normal use, which would be
filter(self._is_jvm_target, target.dependencies).
For this method, we define the set of dependencies which `target` depends on "directly" as:
{ dep | dep is a JvmTarget and exists a directed path p from target to dep such that |p| = 1 }
Where |p| is computed as the weighted sum of all edges in the path, where edges to a JvmTarget
have weight 1, and all other edges have weight 0.
In other words, a JvmTarget 'A' "directly" depends on a JvmTarget 'B' iff there exists a path in
the directed dependency graph from 'A' to 'B' such that there are no internal vertices in the
path that are JvmTargets.
This set is a (not necessarily proper) subset of the set of all JvmTargets that the target
transitively depends on. The algorithms using this map *would* operate correctly on the full
transitive superset, but it is more efficient to use this subset.
The intuition for why we can get away with using this subset: Consider targets A, b, C, D,
such that A depends on b, which depends on C, which depends on D. Say A,C,D are JvmTargets.
If A is on java 6 and C is on java 7, we obviously have a problem, and this will be correctly
identified when verifying the jvm dependencies of A, because the path A->b->C has length 1.
If instead, A is on java 6, and C is on java 6, but D is on java 7, we still have a problem.
It will not be detected when processing A, because A->b->C->D has length 2. But when we process
C, it will be picked up, because C->D has length 1.
Unfortunately, we can't do something as simple as just using actual direct dependencies, because
it's perfectly legal for a java 6 A to depend on b (which is a non-JvmTarget), and legal for
b to depend on a java 7 C, so the transitive information is needed to correctly identify the
problem.
:return: the dict mapping JvmTarget -> set of JvmTargets.
"""
jvm_deps = self._unfiltered_jvm_dependency_map()
return {target: deps for target, deps in jvm_deps.items()
if deps and self._is_jvm_target(target)} | def function[jvm_dependency_map, parameter[self]]:
constant[A map of each JvmTarget in the context to the set of JvmTargets it depends on "directly".
"Directly" is in quotes here because it isn't quite the same as its normal use, which would be
filter(self._is_jvm_target, target.dependencies).
For this method, we define the set of dependencies which `target` depends on "directly" as:
{ dep | dep is a JvmTarget and exists a directed path p from target to dep such that |p| = 1 }
Where |p| is computed as the weighted sum of all edges in the path, where edges to a JvmTarget
have weight 1, and all other edges have weight 0.
In other words, a JvmTarget 'A' "directly" depends on a JvmTarget 'B' iff there exists a path in
the directed dependency graph from 'A' to 'B' such that there are no internal vertices in the
path that are JvmTargets.
This set is a (not necessarily proper) subset of the set of all JvmTargets that the target
transitively depends on. The algorithms using this map *would* operate correctly on the full
transitive superset, but it is more efficient to use this subset.
The intuition for why we can get away with using this subset: Consider targets A, b, C, D,
such that A depends on b, which depends on C, which depends on D. Say A,C,D are JvmTargets.
If A is on java 6 and C is on java 7, we obviously have a problem, and this will be correctly
identified when verifying the jvm dependencies of A, because the path A->b->C has length 1.
If instead, A is on java 6, and C is on java 6, but D is on java 7, we still have a problem.
It will not be detected when processing A, because A->b->C->D has length 2. But when we process
C, it will be picked up, because C->D has length 1.
Unfortunately, we can't do something as simple as just using actual direct dependencies, because
it's perfectly legal for a java 6 A to depend on b (which is a non-JvmTarget), and legal for
b to depend on a java 7 C, so the transitive information is needed to correctly identify the
problem.
:return: the dict mapping JvmTarget -> set of JvmTargets.
]
variable[jvm_deps] assign[=] call[name[self]._unfiltered_jvm_dependency_map, parameter[]]
return[<ast.DictComp object at 0x7da1b1e8f280>] | keyword[def] identifier[jvm_dependency_map] ( identifier[self] ):
literal[string]
identifier[jvm_deps] = identifier[self] . identifier[_unfiltered_jvm_dependency_map] ()
keyword[return] { identifier[target] : identifier[deps] keyword[for] identifier[target] , identifier[deps] keyword[in] identifier[jvm_deps] . identifier[items] ()
keyword[if] identifier[deps] keyword[and] identifier[self] . identifier[_is_jvm_target] ( identifier[target] )} | def jvm_dependency_map(self):
"""A map of each JvmTarget in the context to the set of JvmTargets it depends on "directly".
"Directly" is in quotes here because it isn't quite the same as its normal use, which would be
filter(self._is_jvm_target, target.dependencies).
For this method, we define the set of dependencies which `target` depends on "directly" as:
{ dep | dep is a JvmTarget and exists a directed path p from target to dep such that |p| = 1 }
Where |p| is computed as the weighted sum of all edges in the path, where edges to a JvmTarget
have weight 1, and all other edges have weight 0.
In other words, a JvmTarget 'A' "directly" depends on a JvmTarget 'B' iff there exists a path in
the directed dependency graph from 'A' to 'B' such that there are no internal vertices in the
path that are JvmTargets.
This set is a (not necessarily proper) subset of the set of all JvmTargets that the target
transitively depends on. The algorithms using this map *would* operate correctly on the full
transitive superset, but it is more efficient to use this subset.
The intuition for why we can get away with using this subset: Consider targets A, b, C, D,
such that A depends on b, which depends on C, which depends on D. Say A,C,D are JvmTargets.
If A is on java 6 and C is on java 7, we obviously have a problem, and this will be correctly
identified when verifying the jvm dependencies of A, because the path A->b->C has length 1.
If instead, A is on java 6, and C is on java 6, but D is on java 7, we still have a problem.
It will not be detected when processing A, because A->b->C->D has length 2. But when we process
C, it will be picked up, because C->D has length 1.
Unfortunately, we can't do something as simple as just using actual direct dependencies, because
it's perfectly legal for a java 6 A to depend on b (which is a non-JvmTarget), and legal for
b to depend on a java 7 C, so the transitive information is needed to correctly identify the
problem.
:return: the dict mapping JvmTarget -> set of JvmTargets.
"""
jvm_deps = self._unfiltered_jvm_dependency_map()
return {target: deps for (target, deps) in jvm_deps.items() if deps and self._is_jvm_target(target)} |
def insertBPoint(self, index, type=None, anchor=None, bcpIn=None, bcpOut=None, bPoint=None):
"""
Insert a bPoint at index in the contour.
"""
if bPoint is not None:
if type is None:
type = bPoint.type
if anchor is None:
anchor = bPoint.anchor
if bcpIn is None:
bcpIn = bPoint.bcpIn
if bcpOut is None:
bcpOut = bPoint.bcpOut
index = normalizers.normalizeIndex(index)
type = normalizers.normalizeBPointType(type)
anchor = normalizers.normalizeCoordinateTuple(anchor)
if bcpIn is None:
bcpIn = (0, 0)
bcpIn = normalizers.normalizeCoordinateTuple(bcpIn)
if bcpOut is None:
bcpOut = (0, 0)
bcpOut = normalizers.normalizeCoordinateTuple(bcpOut)
self._insertBPoint(index=index, type=type, anchor=anchor,
bcpIn=bcpIn, bcpOut=bcpOut) | def function[insertBPoint, parameter[self, index, type, anchor, bcpIn, bcpOut, bPoint]]:
constant[
Insert a bPoint at index in the contour.
]
if compare[name[bPoint] is_not constant[None]] begin[:]
if compare[name[type] is constant[None]] begin[:]
variable[type] assign[=] name[bPoint].type
if compare[name[anchor] is constant[None]] begin[:]
variable[anchor] assign[=] name[bPoint].anchor
if compare[name[bcpIn] is constant[None]] begin[:]
variable[bcpIn] assign[=] name[bPoint].bcpIn
if compare[name[bcpOut] is constant[None]] begin[:]
variable[bcpOut] assign[=] name[bPoint].bcpOut
variable[index] assign[=] call[name[normalizers].normalizeIndex, parameter[name[index]]]
variable[type] assign[=] call[name[normalizers].normalizeBPointType, parameter[name[type]]]
variable[anchor] assign[=] call[name[normalizers].normalizeCoordinateTuple, parameter[name[anchor]]]
if compare[name[bcpIn] is constant[None]] begin[:]
variable[bcpIn] assign[=] tuple[[<ast.Constant object at 0x7da204960dc0>, <ast.Constant object at 0x7da204961390>]]
variable[bcpIn] assign[=] call[name[normalizers].normalizeCoordinateTuple, parameter[name[bcpIn]]]
if compare[name[bcpOut] is constant[None]] begin[:]
variable[bcpOut] assign[=] tuple[[<ast.Constant object at 0x7da204961900>, <ast.Constant object at 0x7da2049635b0>]]
variable[bcpOut] assign[=] call[name[normalizers].normalizeCoordinateTuple, parameter[name[bcpOut]]]
call[name[self]._insertBPoint, parameter[]] | keyword[def] identifier[insertBPoint] ( identifier[self] , identifier[index] , identifier[type] = keyword[None] , identifier[anchor] = keyword[None] , identifier[bcpIn] = keyword[None] , identifier[bcpOut] = keyword[None] , identifier[bPoint] = keyword[None] ):
literal[string]
keyword[if] identifier[bPoint] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[type] keyword[is] keyword[None] :
identifier[type] = identifier[bPoint] . identifier[type]
keyword[if] identifier[anchor] keyword[is] keyword[None] :
identifier[anchor] = identifier[bPoint] . identifier[anchor]
keyword[if] identifier[bcpIn] keyword[is] keyword[None] :
identifier[bcpIn] = identifier[bPoint] . identifier[bcpIn]
keyword[if] identifier[bcpOut] keyword[is] keyword[None] :
identifier[bcpOut] = identifier[bPoint] . identifier[bcpOut]
identifier[index] = identifier[normalizers] . identifier[normalizeIndex] ( identifier[index] )
identifier[type] = identifier[normalizers] . identifier[normalizeBPointType] ( identifier[type] )
identifier[anchor] = identifier[normalizers] . identifier[normalizeCoordinateTuple] ( identifier[anchor] )
keyword[if] identifier[bcpIn] keyword[is] keyword[None] :
identifier[bcpIn] =( literal[int] , literal[int] )
identifier[bcpIn] = identifier[normalizers] . identifier[normalizeCoordinateTuple] ( identifier[bcpIn] )
keyword[if] identifier[bcpOut] keyword[is] keyword[None] :
identifier[bcpOut] =( literal[int] , literal[int] )
identifier[bcpOut] = identifier[normalizers] . identifier[normalizeCoordinateTuple] ( identifier[bcpOut] )
identifier[self] . identifier[_insertBPoint] ( identifier[index] = identifier[index] , identifier[type] = identifier[type] , identifier[anchor] = identifier[anchor] ,
identifier[bcpIn] = identifier[bcpIn] , identifier[bcpOut] = identifier[bcpOut] ) | def insertBPoint(self, index, type=None, anchor=None, bcpIn=None, bcpOut=None, bPoint=None):
"""
Insert a bPoint at index in the contour.
"""
if bPoint is not None:
if type is None:
type = bPoint.type # depends on [control=['if'], data=['type']]
if anchor is None:
anchor = bPoint.anchor # depends on [control=['if'], data=['anchor']]
if bcpIn is None:
bcpIn = bPoint.bcpIn # depends on [control=['if'], data=['bcpIn']]
if bcpOut is None:
bcpOut = bPoint.bcpOut # depends on [control=['if'], data=['bcpOut']] # depends on [control=['if'], data=['bPoint']]
index = normalizers.normalizeIndex(index)
type = normalizers.normalizeBPointType(type)
anchor = normalizers.normalizeCoordinateTuple(anchor)
if bcpIn is None:
bcpIn = (0, 0) # depends on [control=['if'], data=['bcpIn']]
bcpIn = normalizers.normalizeCoordinateTuple(bcpIn)
if bcpOut is None:
bcpOut = (0, 0) # depends on [control=['if'], data=['bcpOut']]
bcpOut = normalizers.normalizeCoordinateTuple(bcpOut)
self._insertBPoint(index=index, type=type, anchor=anchor, bcpIn=bcpIn, bcpOut=bcpOut) |
def hash_pandas_object(obj, index=True, encoding='utf8', hash_key=None,
categorize=True):
"""
Return a data hash of the Index/Series/DataFrame
.. versionadded:: 0.19.2
Parameters
----------
index : boolean, default True
include the index in the hash (if Series/DataFrame)
encoding : string, default 'utf8'
encoding for data & key when strings
hash_key : string key to encode, default to _default_hash_key
categorize : bool, default True
Whether to first categorize object arrays before hashing. This is more
efficient when the array contains duplicate values.
.. versionadded:: 0.20.0
Returns
-------
Series of uint64, same length as the object
"""
from pandas import Series
if hash_key is None:
hash_key = _default_hash_key
if isinstance(obj, ABCMultiIndex):
return Series(hash_tuples(obj, encoding, hash_key),
dtype='uint64', copy=False)
if isinstance(obj, ABCIndexClass):
h = hash_array(obj.values, encoding, hash_key,
categorize).astype('uint64', copy=False)
h = Series(h, index=obj, dtype='uint64', copy=False)
elif isinstance(obj, ABCSeries):
h = hash_array(obj.values, encoding, hash_key,
categorize).astype('uint64', copy=False)
if index:
index_iter = (hash_pandas_object(obj.index,
index=False,
encoding=encoding,
hash_key=hash_key,
categorize=categorize).values
for _ in [None])
arrays = itertools.chain([h], index_iter)
h = _combine_hash_arrays(arrays, 2)
h = Series(h, index=obj.index, dtype='uint64', copy=False)
elif isinstance(obj, ABCDataFrame):
hashes = (hash_array(series.values) for _, series in obj.iteritems())
num_items = len(obj.columns)
if index:
index_hash_generator = (hash_pandas_object(obj.index,
index=False,
encoding=encoding,
hash_key=hash_key,
categorize=categorize).values # noqa
for _ in [None])
num_items += 1
hashes = itertools.chain(hashes, index_hash_generator)
h = _combine_hash_arrays(hashes, num_items)
h = Series(h, index=obj.index, dtype='uint64', copy=False)
else:
raise TypeError("Unexpected type for hashing %s" % type(obj))
return h | def function[hash_pandas_object, parameter[obj, index, encoding, hash_key, categorize]]:
constant[
Return a data hash of the Index/Series/DataFrame
.. versionadded:: 0.19.2
Parameters
----------
index : boolean, default True
include the index in the hash (if Series/DataFrame)
encoding : string, default 'utf8'
encoding for data & key when strings
hash_key : string key to encode, default to _default_hash_key
categorize : bool, default True
Whether to first categorize object arrays before hashing. This is more
efficient when the array contains duplicate values.
.. versionadded:: 0.20.0
Returns
-------
Series of uint64, same length as the object
]
from relative_module[pandas] import module[Series]
if compare[name[hash_key] is constant[None]] begin[:]
variable[hash_key] assign[=] name[_default_hash_key]
if call[name[isinstance], parameter[name[obj], name[ABCMultiIndex]]] begin[:]
return[call[name[Series], parameter[call[name[hash_tuples], parameter[name[obj], name[encoding], name[hash_key]]]]]]
if call[name[isinstance], parameter[name[obj], name[ABCIndexClass]]] begin[:]
variable[h] assign[=] call[call[name[hash_array], parameter[name[obj].values, name[encoding], name[hash_key], name[categorize]]].astype, parameter[constant[uint64]]]
variable[h] assign[=] call[name[Series], parameter[name[h]]]
return[name[h]] | keyword[def] identifier[hash_pandas_object] ( identifier[obj] , identifier[index] = keyword[True] , identifier[encoding] = literal[string] , identifier[hash_key] = keyword[None] ,
identifier[categorize] = keyword[True] ):
literal[string]
keyword[from] identifier[pandas] keyword[import] identifier[Series]
keyword[if] identifier[hash_key] keyword[is] keyword[None] :
identifier[hash_key] = identifier[_default_hash_key]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[ABCMultiIndex] ):
keyword[return] identifier[Series] ( identifier[hash_tuples] ( identifier[obj] , identifier[encoding] , identifier[hash_key] ),
identifier[dtype] = literal[string] , identifier[copy] = keyword[False] )
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[ABCIndexClass] ):
identifier[h] = identifier[hash_array] ( identifier[obj] . identifier[values] , identifier[encoding] , identifier[hash_key] ,
identifier[categorize] ). identifier[astype] ( literal[string] , identifier[copy] = keyword[False] )
identifier[h] = identifier[Series] ( identifier[h] , identifier[index] = identifier[obj] , identifier[dtype] = literal[string] , identifier[copy] = keyword[False] )
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[ABCSeries] ):
identifier[h] = identifier[hash_array] ( identifier[obj] . identifier[values] , identifier[encoding] , identifier[hash_key] ,
identifier[categorize] ). identifier[astype] ( literal[string] , identifier[copy] = keyword[False] )
keyword[if] identifier[index] :
identifier[index_iter] =( identifier[hash_pandas_object] ( identifier[obj] . identifier[index] ,
identifier[index] = keyword[False] ,
identifier[encoding] = identifier[encoding] ,
identifier[hash_key] = identifier[hash_key] ,
identifier[categorize] = identifier[categorize] ). identifier[values]
keyword[for] identifier[_] keyword[in] [ keyword[None] ])
identifier[arrays] = identifier[itertools] . identifier[chain] ([ identifier[h] ], identifier[index_iter] )
identifier[h] = identifier[_combine_hash_arrays] ( identifier[arrays] , literal[int] )
identifier[h] = identifier[Series] ( identifier[h] , identifier[index] = identifier[obj] . identifier[index] , identifier[dtype] = literal[string] , identifier[copy] = keyword[False] )
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[ABCDataFrame] ):
identifier[hashes] =( identifier[hash_array] ( identifier[series] . identifier[values] ) keyword[for] identifier[_] , identifier[series] keyword[in] identifier[obj] . identifier[iteritems] ())
identifier[num_items] = identifier[len] ( identifier[obj] . identifier[columns] )
keyword[if] identifier[index] :
identifier[index_hash_generator] =( identifier[hash_pandas_object] ( identifier[obj] . identifier[index] ,
identifier[index] = keyword[False] ,
identifier[encoding] = identifier[encoding] ,
identifier[hash_key] = identifier[hash_key] ,
identifier[categorize] = identifier[categorize] ). identifier[values]
keyword[for] identifier[_] keyword[in] [ keyword[None] ])
identifier[num_items] += literal[int]
identifier[hashes] = identifier[itertools] . identifier[chain] ( identifier[hashes] , identifier[index_hash_generator] )
identifier[h] = identifier[_combine_hash_arrays] ( identifier[hashes] , identifier[num_items] )
identifier[h] = identifier[Series] ( identifier[h] , identifier[index] = identifier[obj] . identifier[index] , identifier[dtype] = literal[string] , identifier[copy] = keyword[False] )
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] % identifier[type] ( identifier[obj] ))
keyword[return] identifier[h] | def hash_pandas_object(obj, index=True, encoding='utf8', hash_key=None, categorize=True):
"""
Return a data hash of the Index/Series/DataFrame
.. versionadded:: 0.19.2
Parameters
----------
index : boolean, default True
include the index in the hash (if Series/DataFrame)
encoding : string, default 'utf8'
encoding for data & key when strings
hash_key : string key to encode, default to _default_hash_key
categorize : bool, default True
Whether to first categorize object arrays before hashing. This is more
efficient when the array contains duplicate values.
.. versionadded:: 0.20.0
Returns
-------
Series of uint64, same length as the object
"""
from pandas import Series
if hash_key is None:
hash_key = _default_hash_key # depends on [control=['if'], data=['hash_key']]
if isinstance(obj, ABCMultiIndex):
return Series(hash_tuples(obj, encoding, hash_key), dtype='uint64', copy=False) # depends on [control=['if'], data=[]]
if isinstance(obj, ABCIndexClass):
h = hash_array(obj.values, encoding, hash_key, categorize).astype('uint64', copy=False)
h = Series(h, index=obj, dtype='uint64', copy=False) # depends on [control=['if'], data=[]]
elif isinstance(obj, ABCSeries):
h = hash_array(obj.values, encoding, hash_key, categorize).astype('uint64', copy=False)
if index:
index_iter = (hash_pandas_object(obj.index, index=False, encoding=encoding, hash_key=hash_key, categorize=categorize).values for _ in [None])
arrays = itertools.chain([h], index_iter)
h = _combine_hash_arrays(arrays, 2) # depends on [control=['if'], data=[]]
h = Series(h, index=obj.index, dtype='uint64', copy=False) # depends on [control=['if'], data=[]]
elif isinstance(obj, ABCDataFrame):
hashes = (hash_array(series.values) for (_, series) in obj.iteritems())
num_items = len(obj.columns)
if index: # noqa
index_hash_generator = (hash_pandas_object(obj.index, index=False, encoding=encoding, hash_key=hash_key, categorize=categorize).values for _ in [None])
num_items += 1
hashes = itertools.chain(hashes, index_hash_generator) # depends on [control=['if'], data=[]]
h = _combine_hash_arrays(hashes, num_items)
h = Series(h, index=obj.index, dtype='uint64', copy=False) # depends on [control=['if'], data=[]]
else:
raise TypeError('Unexpected type for hashing %s' % type(obj))
return h |
def exclude(self, **attrs):
"""Remove items from distribution that are named in keyword arguments
For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
the distribution's 'py_modules' attribute. Excluding packages uses
the 'exclude_package()' method, so all of the package's contained
packages, modules, and extensions are also excluded.
Currently, this method only supports exclusion from attributes that are
lists or tuples. If you need to add support for excluding from other
attributes in this or a subclass, you can add an '_exclude_X' method,
where 'X' is the name of the attribute. The method will be called with
the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})'
will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
handle whatever special exclusion logic is needed.
"""
for k, v in attrs.items():
exclude = getattr(self, '_exclude_' + k, None)
if exclude:
exclude(v)
else:
self._exclude_misc(k, v) | def function[exclude, parameter[self]]:
constant[Remove items from distribution that are named in keyword arguments
For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
the distribution's 'py_modules' attribute. Excluding packages uses
the 'exclude_package()' method, so all of the package's contained
packages, modules, and extensions are also excluded.
Currently, this method only supports exclusion from attributes that are
lists or tuples. If you need to add support for excluding from other
attributes in this or a subclass, you can add an '_exclude_X' method,
where 'X' is the name of the attribute. The method will be called with
the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})'
will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
handle whatever special exclusion logic is needed.
]
for taget[tuple[[<ast.Name object at 0x7da1b1b18760>, <ast.Name object at 0x7da1b1b18bb0>]]] in starred[call[name[attrs].items, parameter[]]] begin[:]
variable[exclude] assign[=] call[name[getattr], parameter[name[self], binary_operation[constant[_exclude_] + name[k]], constant[None]]]
if name[exclude] begin[:]
call[name[exclude], parameter[name[v]]] | keyword[def] identifier[exclude] ( identifier[self] ,** identifier[attrs] ):
literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[attrs] . identifier[items] ():
identifier[exclude] = identifier[getattr] ( identifier[self] , literal[string] + identifier[k] , keyword[None] )
keyword[if] identifier[exclude] :
identifier[exclude] ( identifier[v] )
keyword[else] :
identifier[self] . identifier[_exclude_misc] ( identifier[k] , identifier[v] ) | def exclude(self, **attrs):
"""Remove items from distribution that are named in keyword arguments
For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
the distribution's 'py_modules' attribute. Excluding packages uses
the 'exclude_package()' method, so all of the package's contained
packages, modules, and extensions are also excluded.
Currently, this method only supports exclusion from attributes that are
lists or tuples. If you need to add support for excluding from other
attributes in this or a subclass, you can add an '_exclude_X' method,
where 'X' is the name of the attribute. The method will be called with
the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})'
will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
handle whatever special exclusion logic is needed.
"""
for (k, v) in attrs.items():
exclude = getattr(self, '_exclude_' + k, None)
if exclude:
exclude(v) # depends on [control=['if'], data=[]]
else:
self._exclude_misc(k, v) # depends on [control=['for'], data=[]] |
def ready_argument_list(self, arguments):
"""ready argument list to be passed to the C function
:param arguments: List of arguments to be passed to the C function.
The order should match the argument list on the C function.
Allowed values are numpy.ndarray, and/or numpy.int32, numpy.float32, and so on.
:type arguments: list(numpy objects)
:returns: A list of arguments that can be passed to the C function.
:rtype: list(Argument)
"""
ctype_args = [ None for _ in arguments]
for i, arg in enumerate(arguments):
if not isinstance(arg, (numpy.ndarray, numpy.number)):
raise TypeError("Argument is not numpy ndarray or numpy scalar %s" % type(arg))
dtype_str = str(arg.dtype)
data = arg.copy()
if isinstance(arg, numpy.ndarray):
if dtype_str in dtype_map.keys():
# In numpy <= 1.15, ndarray.ctypes.data_as does not itself keep a reference
# to its underlying array, so we need to store a reference to arg.copy()
# in the Argument object manually to avoid it being deleted.
# (This changed in numpy > 1.15.)
data_ctypes = data.ctypes.data_as(C.POINTER(dtype_map[dtype_str]))
else:
raise TypeError("unknown dtype for ndarray")
elif isinstance(arg, numpy.generic):
data_ctypes = dtype_map[dtype_str](arg)
ctype_args[i] = Argument(numpy=data, ctypes=data_ctypes)
return ctype_args | def function[ready_argument_list, parameter[self, arguments]]:
constant[ready argument list to be passed to the C function
:param arguments: List of arguments to be passed to the C function.
The order should match the argument list on the C function.
Allowed values are numpy.ndarray, and/or numpy.int32, numpy.float32, and so on.
:type arguments: list(numpy objects)
:returns: A list of arguments that can be passed to the C function.
:rtype: list(Argument)
]
variable[ctype_args] assign[=] <ast.ListComp object at 0x7da1b04db250>
for taget[tuple[[<ast.Name object at 0x7da1b04db400>, <ast.Name object at 0x7da1b04db3d0>]]] in starred[call[name[enumerate], parameter[name[arguments]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b04daa40> begin[:]
<ast.Raise object at 0x7da1b04d9b40>
variable[dtype_str] assign[=] call[name[str], parameter[name[arg].dtype]]
variable[data] assign[=] call[name[arg].copy, parameter[]]
if call[name[isinstance], parameter[name[arg], name[numpy].ndarray]] begin[:]
if compare[name[dtype_str] in call[name[dtype_map].keys, parameter[]]] begin[:]
variable[data_ctypes] assign[=] call[name[data].ctypes.data_as, parameter[call[name[C].POINTER, parameter[call[name[dtype_map]][name[dtype_str]]]]]]
call[name[ctype_args]][name[i]] assign[=] call[name[Argument], parameter[]]
return[name[ctype_args]] | keyword[def] identifier[ready_argument_list] ( identifier[self] , identifier[arguments] ):
literal[string]
identifier[ctype_args] =[ keyword[None] keyword[for] identifier[_] keyword[in] identifier[arguments] ]
keyword[for] identifier[i] , identifier[arg] keyword[in] identifier[enumerate] ( identifier[arguments] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[arg] ,( identifier[numpy] . identifier[ndarray] , identifier[numpy] . identifier[number] )):
keyword[raise] identifier[TypeError] ( literal[string] % identifier[type] ( identifier[arg] ))
identifier[dtype_str] = identifier[str] ( identifier[arg] . identifier[dtype] )
identifier[data] = identifier[arg] . identifier[copy] ()
keyword[if] identifier[isinstance] ( identifier[arg] , identifier[numpy] . identifier[ndarray] ):
keyword[if] identifier[dtype_str] keyword[in] identifier[dtype_map] . identifier[keys] ():
identifier[data_ctypes] = identifier[data] . identifier[ctypes] . identifier[data_as] ( identifier[C] . identifier[POINTER] ( identifier[dtype_map] [ identifier[dtype_str] ]))
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[elif] identifier[isinstance] ( identifier[arg] , identifier[numpy] . identifier[generic] ):
identifier[data_ctypes] = identifier[dtype_map] [ identifier[dtype_str] ]( identifier[arg] )
identifier[ctype_args] [ identifier[i] ]= identifier[Argument] ( identifier[numpy] = identifier[data] , identifier[ctypes] = identifier[data_ctypes] )
keyword[return] identifier[ctype_args] | def ready_argument_list(self, arguments):
"""ready argument list to be passed to the C function
:param arguments: List of arguments to be passed to the C function.
The order should match the argument list on the C function.
Allowed values are numpy.ndarray, and/or numpy.int32, numpy.float32, and so on.
:type arguments: list(numpy objects)
:returns: A list of arguments that can be passed to the C function.
:rtype: list(Argument)
"""
ctype_args = [None for _ in arguments]
for (i, arg) in enumerate(arguments):
if not isinstance(arg, (numpy.ndarray, numpy.number)):
raise TypeError('Argument is not numpy ndarray or numpy scalar %s' % type(arg)) # depends on [control=['if'], data=[]]
dtype_str = str(arg.dtype)
data = arg.copy()
if isinstance(arg, numpy.ndarray):
if dtype_str in dtype_map.keys():
# In numpy <= 1.15, ndarray.ctypes.data_as does not itself keep a reference
# to its underlying array, so we need to store a reference to arg.copy()
# in the Argument object manually to avoid it being deleted.
# (This changed in numpy > 1.15.)
data_ctypes = data.ctypes.data_as(C.POINTER(dtype_map[dtype_str])) # depends on [control=['if'], data=['dtype_str']]
else:
raise TypeError('unknown dtype for ndarray') # depends on [control=['if'], data=[]]
elif isinstance(arg, numpy.generic):
data_ctypes = dtype_map[dtype_str](arg) # depends on [control=['if'], data=[]]
ctype_args[i] = Argument(numpy=data, ctypes=data_ctypes) # depends on [control=['for'], data=[]]
return ctype_args |
def apply_plugins(plugin_names):
"""
This function should be used by code in the SQUAD core to trigger
functionality from plugins.
The ``plugin_names`` argument is list of plugins names to be used. Most
probably, you will want to pass the list of plugins enabled for a given
project, e.g. ``project.enabled_plugins``.
Example::
from squad.core.plugins import apply_plugins
# ...
for plugin in apply_plugins(project.enabled_plugins):
plugin.method(...)
"""
if plugin_names is None:
return
for p in plugin_names:
try:
plugin = get_plugin_instance(p)
yield(plugin)
except PluginNotFound:
pass | def function[apply_plugins, parameter[plugin_names]]:
constant[
This function should be used by code in the SQUAD core to trigger
functionality from plugins.
The ``plugin_names`` argument is list of plugins names to be used. Most
probably, you will want to pass the list of plugins enabled for a given
project, e.g. ``project.enabled_plugins``.
Example::
from squad.core.plugins import apply_plugins
# ...
for plugin in apply_plugins(project.enabled_plugins):
plugin.method(...)
]
if compare[name[plugin_names] is constant[None]] begin[:]
return[None]
for taget[name[p]] in starred[name[plugin_names]] begin[:]
<ast.Try object at 0x7da1b0c8a170> | keyword[def] identifier[apply_plugins] ( identifier[plugin_names] ):
literal[string]
keyword[if] identifier[plugin_names] keyword[is] keyword[None] :
keyword[return]
keyword[for] identifier[p] keyword[in] identifier[plugin_names] :
keyword[try] :
identifier[plugin] = identifier[get_plugin_instance] ( identifier[p] )
keyword[yield] ( identifier[plugin] )
keyword[except] identifier[PluginNotFound] :
keyword[pass] | def apply_plugins(plugin_names):
"""
This function should be used by code in the SQUAD core to trigger
functionality from plugins.
The ``plugin_names`` argument is list of plugins names to be used. Most
probably, you will want to pass the list of plugins enabled for a given
project, e.g. ``project.enabled_plugins``.
Example::
from squad.core.plugins import apply_plugins
# ...
for plugin in apply_plugins(project.enabled_plugins):
plugin.method(...)
"""
if plugin_names is None:
return # depends on [control=['if'], data=[]]
for p in plugin_names:
try:
plugin = get_plugin_instance(p)
yield plugin # depends on [control=['try'], data=[]]
except PluginNotFound:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['p']] |
def enable_extana_streaming(self, include_imu=False, enabled_sensors=SENSOR_ALL):
"""Configures and enables sensor data streaming from the SK8-ExtAna device.
By default this will cause the SK8 to only stream data from the analog
sensors on the SK8-ExtAna, but if `include_imu` is set to True, it will
also send data from the internal IMU in the SK8.
NOTE: only one streaming mode can be active at any time, so e.g. if you
want to stream IMU data normally, you must disable SK8-ExtAna streaming first.
Args:
include_imu (bool): If False, only SK8-ExtAna packets will be streamed.
If True, the device will also stream data from the SK8's internal IMU.
enabled_sensors (int): If `include_imu` is True, this can be used to
select which IMU sensors will be active.
Returns:
bool. True if successful, False if an error occurred.
"""
if not self.dongle._enable_extana_streaming(self, include_imu, enabled_sensors):
logger.warn('Failed to enable SK8-ExtAna streaming!')
return False
# have to add IMU #0 to enabled_imus if include_imu is True
if include_imu:
self.enabled_imus = [0]
return True | def function[enable_extana_streaming, parameter[self, include_imu, enabled_sensors]]:
constant[Configures and enables sensor data streaming from the SK8-ExtAna device.
By default this will cause the SK8 to only stream data from the analog
sensors on the SK8-ExtAna, but if `include_imu` is set to True, it will
also send data from the internal IMU in the SK8.
NOTE: only one streaming mode can be active at any time, so e.g. if you
want to stream IMU data normally, you must disable SK8-ExtAna streaming first.
Args:
include_imu (bool): If False, only SK8-ExtAna packets will be streamed.
If True, the device will also stream data from the SK8's internal IMU.
enabled_sensors (int): If `include_imu` is True, this can be used to
select which IMU sensors will be active.
Returns:
bool. True if successful, False if an error occurred.
]
if <ast.UnaryOp object at 0x7da1b1435930> begin[:]
call[name[logger].warn, parameter[constant[Failed to enable SK8-ExtAna streaming!]]]
return[constant[False]]
if name[include_imu] begin[:]
name[self].enabled_imus assign[=] list[[<ast.Constant object at 0x7da1b14e4d30>]]
return[constant[True]] | keyword[def] identifier[enable_extana_streaming] ( identifier[self] , identifier[include_imu] = keyword[False] , identifier[enabled_sensors] = identifier[SENSOR_ALL] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[dongle] . identifier[_enable_extana_streaming] ( identifier[self] , identifier[include_imu] , identifier[enabled_sensors] ):
identifier[logger] . identifier[warn] ( literal[string] )
keyword[return] keyword[False]
keyword[if] identifier[include_imu] :
identifier[self] . identifier[enabled_imus] =[ literal[int] ]
keyword[return] keyword[True] | def enable_extana_streaming(self, include_imu=False, enabled_sensors=SENSOR_ALL):
"""Configures and enables sensor data streaming from the SK8-ExtAna device.
By default this will cause the SK8 to only stream data from the analog
sensors on the SK8-ExtAna, but if `include_imu` is set to True, it will
also send data from the internal IMU in the SK8.
NOTE: only one streaming mode can be active at any time, so e.g. if you
want to stream IMU data normally, you must disable SK8-ExtAna streaming first.
Args:
include_imu (bool): If False, only SK8-ExtAna packets will be streamed.
If True, the device will also stream data from the SK8's internal IMU.
enabled_sensors (int): If `include_imu` is True, this can be used to
select which IMU sensors will be active.
Returns:
bool. True if successful, False if an error occurred.
"""
if not self.dongle._enable_extana_streaming(self, include_imu, enabled_sensors):
logger.warn('Failed to enable SK8-ExtAna streaming!')
return False # depends on [control=['if'], data=[]]
# have to add IMU #0 to enabled_imus if include_imu is True
if include_imu:
self.enabled_imus = [0] # depends on [control=['if'], data=[]]
return True |
def restart_tools(self,
repo=None,
name=None,
groups=None,
branch='master',
version='HEAD',
main_cfg=False,
old_val='',
new_val=''):
"""
Restart necessary tools based on changes that have been made either to
vent.cfg or to vent.template. This includes tools that need to be
restarted because they depend on other tools that were changed.
"""
status = (True, None)
if not main_cfg:
try:
t_identifier = {'name': name,
'branch': branch,
'version': version}
result = Template(System().manifest).constrain_opts(t_identifier,
['running',
'link_name'])
tools = result[0]
tool = list(tools.keys())[0]
if ('running' in tools[tool] and
tools[tool]['running'] == 'yes'):
start_tools = [t_identifier]
dependent_tools = [tools[tool]['link_name']]
start_tools += Dependencies(dependent_tools)
# TODO
start_d = {}
for tool_identifier in start_tools:
self.clean(**tool_identifier)
start_d.update(self.prep_start(**tool_identifier)[1])
if start_d:
Tools().start(start_d, '', is_tool_d=True)
except Exception as e: # pragma: no cover
self.logger.error('Trouble restarting tool ' + name +
' because: ' + str(e))
status = (False, str(e))
else:
try:
# string manipulation to get tools into arrays
ext_start = old_val.find('[external-services]')
if ext_start >= 0:
ot_str = old_val[old_val.find('[external-services]') + 20:]
else:
ot_str = ''
old_tools = []
for old_tool in ot_str.split('\n'):
if old_tool != '':
old_tools.append(old_tool.split('=')[0].strip())
ext_start = new_val.find('[external-services]')
if ext_start >= 0:
nt_str = new_val[new_val.find('[external-services]') + 20:]
else:
nt_str = ''
new_tools = []
for new_tool in nt_str.split('\n'):
if new_tool != '':
new_tools.append(new_tool.split('=')[0].strip())
# find tools changed
tool_changes = []
for old_tool in old_tools:
if old_tool not in new_tools:
tool_changes.append(old_tool)
for new_tool in new_tools:
if new_tool not in old_tools:
tool_changes.append(new_tool)
else:
# tool name will be the same
oconf = old_val[old_val.find(new_tool):].split('\n')[0]
nconf = new_val[new_val.find(new_tool):].split('\n')[0]
if oconf != nconf:
tool_changes.append(new_tool)
# put link names in a dictionary for finding dependencies
dependent_tools = []
for i, entry in enumerate(tool_changes):
dependent_tools.append(entry)
# change names to lowercase for use in clean, prep_start
tool_changes[i] = {'name': entry.lower().replace('-', '_')}
dependencies = Dependencies(dependent_tools)
# restart tools
restart = tool_changes + dependencies
tool_d = {}
for tool in restart:
self.clean(**tool)
tool_d.update(self.prep_start(**tool)[1])
if tool_d:
# TODO fix the arguments
Tools().start(tool_d)
except Exception as e: # pragma: no cover
self.logger.error('Problem restarting tools: ' + str(e))
status = (False, str(e))
return status | def function[restart_tools, parameter[self, repo, name, groups, branch, version, main_cfg, old_val, new_val]]:
constant[
Restart necessary tools based on changes that have been made either to
vent.cfg or to vent.template. This includes tools that need to be
restarted because they depend on other tools that were changed.
]
variable[status] assign[=] tuple[[<ast.Constant object at 0x7da1b0e47e50>, <ast.Constant object at 0x7da1b0e47220>]]
if <ast.UnaryOp object at 0x7da1b0e46bc0> begin[:]
<ast.Try object at 0x7da1b0e458a0>
return[name[status]] | keyword[def] identifier[restart_tools] ( identifier[self] ,
identifier[repo] = keyword[None] ,
identifier[name] = keyword[None] ,
identifier[groups] = keyword[None] ,
identifier[branch] = literal[string] ,
identifier[version] = literal[string] ,
identifier[main_cfg] = keyword[False] ,
identifier[old_val] = literal[string] ,
identifier[new_val] = literal[string] ):
literal[string]
identifier[status] =( keyword[True] , keyword[None] )
keyword[if] keyword[not] identifier[main_cfg] :
keyword[try] :
identifier[t_identifier] ={ literal[string] : identifier[name] ,
literal[string] : identifier[branch] ,
literal[string] : identifier[version] }
identifier[result] = identifier[Template] ( identifier[System] (). identifier[manifest] ). identifier[constrain_opts] ( identifier[t_identifier] ,
[ literal[string] ,
literal[string] ])
identifier[tools] = identifier[result] [ literal[int] ]
identifier[tool] = identifier[list] ( identifier[tools] . identifier[keys] ())[ literal[int] ]
keyword[if] ( literal[string] keyword[in] identifier[tools] [ identifier[tool] ] keyword[and]
identifier[tools] [ identifier[tool] ][ literal[string] ]== literal[string] ):
identifier[start_tools] =[ identifier[t_identifier] ]
identifier[dependent_tools] =[ identifier[tools] [ identifier[tool] ][ literal[string] ]]
identifier[start_tools] += identifier[Dependencies] ( identifier[dependent_tools] )
identifier[start_d] ={}
keyword[for] identifier[tool_identifier] keyword[in] identifier[start_tools] :
identifier[self] . identifier[clean] (** identifier[tool_identifier] )
identifier[start_d] . identifier[update] ( identifier[self] . identifier[prep_start] (** identifier[tool_identifier] )[ literal[int] ])
keyword[if] identifier[start_d] :
identifier[Tools] (). identifier[start] ( identifier[start_d] , literal[string] , identifier[is_tool_d] = keyword[True] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] + identifier[name] +
literal[string] + identifier[str] ( identifier[e] ))
identifier[status] =( keyword[False] , identifier[str] ( identifier[e] ))
keyword[else] :
keyword[try] :
identifier[ext_start] = identifier[old_val] . identifier[find] ( literal[string] )
keyword[if] identifier[ext_start] >= literal[int] :
identifier[ot_str] = identifier[old_val] [ identifier[old_val] . identifier[find] ( literal[string] )+ literal[int] :]
keyword[else] :
identifier[ot_str] = literal[string]
identifier[old_tools] =[]
keyword[for] identifier[old_tool] keyword[in] identifier[ot_str] . identifier[split] ( literal[string] ):
keyword[if] identifier[old_tool] != literal[string] :
identifier[old_tools] . identifier[append] ( identifier[old_tool] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[strip] ())
identifier[ext_start] = identifier[new_val] . identifier[find] ( literal[string] )
keyword[if] identifier[ext_start] >= literal[int] :
identifier[nt_str] = identifier[new_val] [ identifier[new_val] . identifier[find] ( literal[string] )+ literal[int] :]
keyword[else] :
identifier[nt_str] = literal[string]
identifier[new_tools] =[]
keyword[for] identifier[new_tool] keyword[in] identifier[nt_str] . identifier[split] ( literal[string] ):
keyword[if] identifier[new_tool] != literal[string] :
identifier[new_tools] . identifier[append] ( identifier[new_tool] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[strip] ())
identifier[tool_changes] =[]
keyword[for] identifier[old_tool] keyword[in] identifier[old_tools] :
keyword[if] identifier[old_tool] keyword[not] keyword[in] identifier[new_tools] :
identifier[tool_changes] . identifier[append] ( identifier[old_tool] )
keyword[for] identifier[new_tool] keyword[in] identifier[new_tools] :
keyword[if] identifier[new_tool] keyword[not] keyword[in] identifier[old_tools] :
identifier[tool_changes] . identifier[append] ( identifier[new_tool] )
keyword[else] :
identifier[oconf] = identifier[old_val] [ identifier[old_val] . identifier[find] ( identifier[new_tool] ):]. identifier[split] ( literal[string] )[ literal[int] ]
identifier[nconf] = identifier[new_val] [ identifier[new_val] . identifier[find] ( identifier[new_tool] ):]. identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] identifier[oconf] != identifier[nconf] :
identifier[tool_changes] . identifier[append] ( identifier[new_tool] )
identifier[dependent_tools] =[]
keyword[for] identifier[i] , identifier[entry] keyword[in] identifier[enumerate] ( identifier[tool_changes] ):
identifier[dependent_tools] . identifier[append] ( identifier[entry] )
identifier[tool_changes] [ identifier[i] ]={ literal[string] : identifier[entry] . identifier[lower] (). identifier[replace] ( literal[string] , literal[string] )}
identifier[dependencies] = identifier[Dependencies] ( identifier[dependent_tools] )
identifier[restart] = identifier[tool_changes] + identifier[dependencies]
identifier[tool_d] ={}
keyword[for] identifier[tool] keyword[in] identifier[restart] :
identifier[self] . identifier[clean] (** identifier[tool] )
identifier[tool_d] . identifier[update] ( identifier[self] . identifier[prep_start] (** identifier[tool] )[ literal[int] ])
keyword[if] identifier[tool_d] :
identifier[Tools] (). identifier[start] ( identifier[tool_d] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] + identifier[str] ( identifier[e] ))
identifier[status] =( keyword[False] , identifier[str] ( identifier[e] ))
keyword[return] identifier[status] | def restart_tools(self, repo=None, name=None, groups=None, branch='master', version='HEAD', main_cfg=False, old_val='', new_val=''):
"""
Restart necessary tools based on changes that have been made either to
vent.cfg or to vent.template. This includes tools that need to be
restarted because they depend on other tools that were changed.
"""
status = (True, None)
if not main_cfg:
try:
t_identifier = {'name': name, 'branch': branch, 'version': version}
result = Template(System().manifest).constrain_opts(t_identifier, ['running', 'link_name'])
tools = result[0]
tool = list(tools.keys())[0]
if 'running' in tools[tool] and tools[tool]['running'] == 'yes':
start_tools = [t_identifier]
dependent_tools = [tools[tool]['link_name']]
start_tools += Dependencies(dependent_tools)
# TODO
start_d = {}
for tool_identifier in start_tools:
self.clean(**tool_identifier)
start_d.update(self.prep_start(**tool_identifier)[1]) # depends on [control=['for'], data=['tool_identifier']]
if start_d:
Tools().start(start_d, '', is_tool_d=True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as e: # pragma: no cover
self.logger.error('Trouble restarting tool ' + name + ' because: ' + str(e))
status = (False, str(e)) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
else:
try:
# string manipulation to get tools into arrays
ext_start = old_val.find('[external-services]')
if ext_start >= 0:
ot_str = old_val[old_val.find('[external-services]') + 20:] # depends on [control=['if'], data=[]]
else:
ot_str = ''
old_tools = []
for old_tool in ot_str.split('\n'):
if old_tool != '':
old_tools.append(old_tool.split('=')[0].strip()) # depends on [control=['if'], data=['old_tool']] # depends on [control=['for'], data=['old_tool']]
ext_start = new_val.find('[external-services]')
if ext_start >= 0:
nt_str = new_val[new_val.find('[external-services]') + 20:] # depends on [control=['if'], data=[]]
else:
nt_str = ''
new_tools = []
for new_tool in nt_str.split('\n'):
if new_tool != '':
new_tools.append(new_tool.split('=')[0].strip()) # depends on [control=['if'], data=['new_tool']] # depends on [control=['for'], data=['new_tool']]
# find tools changed
tool_changes = []
for old_tool in old_tools:
if old_tool not in new_tools:
tool_changes.append(old_tool) # depends on [control=['if'], data=['old_tool']] # depends on [control=['for'], data=['old_tool']]
for new_tool in new_tools:
if new_tool not in old_tools:
tool_changes.append(new_tool) # depends on [control=['if'], data=['new_tool']]
else:
# tool name will be the same
oconf = old_val[old_val.find(new_tool):].split('\n')[0]
nconf = new_val[new_val.find(new_tool):].split('\n')[0]
if oconf != nconf:
tool_changes.append(new_tool) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['new_tool']]
# put link names in a dictionary for finding dependencies
dependent_tools = []
for (i, entry) in enumerate(tool_changes):
dependent_tools.append(entry)
# change names to lowercase for use in clean, prep_start
tool_changes[i] = {'name': entry.lower().replace('-', '_')} # depends on [control=['for'], data=[]]
dependencies = Dependencies(dependent_tools)
# restart tools
restart = tool_changes + dependencies
tool_d = {}
for tool in restart:
self.clean(**tool)
tool_d.update(self.prep_start(**tool)[1]) # depends on [control=['for'], data=['tool']]
if tool_d:
# TODO fix the arguments
Tools().start(tool_d) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as e: # pragma: no cover
self.logger.error('Problem restarting tools: ' + str(e))
status = (False, str(e)) # depends on [control=['except'], data=['e']]
return status |
def python_to_couch(options):
"""
Translates query options from python style options into CouchDB/Cloudant
query options. For example ``{'include_docs': True}`` will
translate to ``{'include_docs': 'true'}``. Primarily meant for use by
code that formulates a query to retrieve results data from the
remote database, such as the database API convenience method
:func:`~cloudant.database.CouchDatabase.all_docs` or the View
:func:`~cloudant.view.View.__call__` callable, both used to retrieve data.
:param dict options: Python style parameters to be translated.
:returns: Dictionary of translated CouchDB/Cloudant query parameters
"""
translation = dict()
for key, val in iteritems_(options):
py_to_couch_validate(key, val)
translation.update(_py_to_couch_translate(key, val))
return translation | def function[python_to_couch, parameter[options]]:
constant[
Translates query options from python style options into CouchDB/Cloudant
query options. For example ``{'include_docs': True}`` will
translate to ``{'include_docs': 'true'}``. Primarily meant for use by
code that formulates a query to retrieve results data from the
remote database, such as the database API convenience method
:func:`~cloudant.database.CouchDatabase.all_docs` or the View
:func:`~cloudant.view.View.__call__` callable, both used to retrieve data.
:param dict options: Python style parameters to be translated.
:returns: Dictionary of translated CouchDB/Cloudant query parameters
]
variable[translation] assign[=] call[name[dict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20e957550>, <ast.Name object at 0x7da20e957c70>]]] in starred[call[name[iteritems_], parameter[name[options]]]] begin[:]
call[name[py_to_couch_validate], parameter[name[key], name[val]]]
call[name[translation].update, parameter[call[name[_py_to_couch_translate], parameter[name[key], name[val]]]]]
return[name[translation]] | keyword[def] identifier[python_to_couch] ( identifier[options] ):
literal[string]
identifier[translation] = identifier[dict] ()
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[iteritems_] ( identifier[options] ):
identifier[py_to_couch_validate] ( identifier[key] , identifier[val] )
identifier[translation] . identifier[update] ( identifier[_py_to_couch_translate] ( identifier[key] , identifier[val] ))
keyword[return] identifier[translation] | def python_to_couch(options):
"""
Translates query options from python style options into CouchDB/Cloudant
query options. For example ``{'include_docs': True}`` will
translate to ``{'include_docs': 'true'}``. Primarily meant for use by
code that formulates a query to retrieve results data from the
remote database, such as the database API convenience method
:func:`~cloudant.database.CouchDatabase.all_docs` or the View
:func:`~cloudant.view.View.__call__` callable, both used to retrieve data.
:param dict options: Python style parameters to be translated.
:returns: Dictionary of translated CouchDB/Cloudant query parameters
"""
translation = dict()
for (key, val) in iteritems_(options):
py_to_couch_validate(key, val)
translation.update(_py_to_couch_translate(key, val)) # depends on [control=['for'], data=[]]
return translation |
def moebius(
num_twists=1, # How many twists are there in the 'paper'?
nl=60, # Number of nodes along the length of the strip
nw=11, # Number of nodes along the width of the strip (>= 2)
mode="classical",
):
"""Creates a simplistic triangular mesh on a slightly Möbius strip. The
Möbius strip here deviates slightly from the ordinary geometry in that it
is constructed in such a way that the two halves can be exchanged as to
allow better comparison with the pseudo-Möbius geometry.
The mode is either `'classical'` or `'smooth'`. The first is the classical
Möbius band parametrization, the latter a smoothed variant matching
`'pseudo'`.
"""
# The width of the strip
width = 1.0
scale = 10.0
# radius of the strip when flattened out
r = 1.0
# seam displacement
alpha0 = 0.0 # pi / 2
# How flat the strip will be.
# Positive values result in left-turning Möbius strips, negative in
# right-turning ones.
# Also influences the width of the strip.
flatness = 1.0
# Generate suitable ranges for parametrization
u_range = numpy.linspace(0.0, 2 * numpy.pi, num=nl, endpoint=False)
v_range = numpy.linspace(-0.5 * width, 0.5 * width, num=nw)
# Create the vertices. This is based on the parameterization
# of the Möbius strip as given in
# <http://en.wikipedia.org/wiki/M%C3%B6bius_strip#Geometry_and_topology>
sin_u = numpy.sin(u_range)
cos_u = numpy.cos(u_range)
alpha = num_twists * 0.5 * u_range + alpha0
sin_alpha = numpy.sin(alpha)
cos_alpha = numpy.cos(alpha)
if mode == "classical":
a = cos_alpha
b = sin_alpha
reverse_seam = num_twists % 2 == 1
elif mode == "smooth":
# The fundamental difference with the ordinary Möbius band here are the
# squares.
# It is also possible to to abs() the respective sines and cosines, but
# this results in a non-smooth manifold.
a = numpy.copysign(cos_alpha ** 2, cos_alpha)
b = numpy.copysign(sin_alpha ** 2, sin_alpha)
reverse_seam = num_twists % 2 == 1
else:
assert mode == "pseudo"
a = cos_alpha ** 2
b = sin_alpha ** 2
reverse_seam = False
nodes = (
scale
* numpy.array(
[
numpy.outer(a * cos_u, v_range) + r * cos_u[:, numpy.newaxis],
numpy.outer(a * sin_u, v_range) + r * sin_u[:, numpy.newaxis],
numpy.outer(b, v_range) * flatness,
]
)
.reshape(3, -1)
.T
)
elems = _create_elements(nl, nw, reverse_seam)
return nodes, elems | def function[moebius, parameter[num_twists, nl, nw, mode]]:
constant[Creates a simplistic triangular mesh on a slightly Möbius strip. The
Möbius strip here deviates slightly from the ordinary geometry in that it
is constructed in such a way that the two halves can be exchanged as to
allow better comparison with the pseudo-Möbius geometry.
The mode is either `'classical'` or `'smooth'`. The first is the classical
Möbius band parametrization, the latter a smoothed variant matching
`'pseudo'`.
]
variable[width] assign[=] constant[1.0]
variable[scale] assign[=] constant[10.0]
variable[r] assign[=] constant[1.0]
variable[alpha0] assign[=] constant[0.0]
variable[flatness] assign[=] constant[1.0]
variable[u_range] assign[=] call[name[numpy].linspace, parameter[constant[0.0], binary_operation[constant[2] * name[numpy].pi]]]
variable[v_range] assign[=] call[name[numpy].linspace, parameter[binary_operation[<ast.UnaryOp object at 0x7da2045648e0> * name[width]], binary_operation[constant[0.5] * name[width]]]]
variable[sin_u] assign[=] call[name[numpy].sin, parameter[name[u_range]]]
variable[cos_u] assign[=] call[name[numpy].cos, parameter[name[u_range]]]
variable[alpha] assign[=] binary_operation[binary_operation[binary_operation[name[num_twists] * constant[0.5]] * name[u_range]] + name[alpha0]]
variable[sin_alpha] assign[=] call[name[numpy].sin, parameter[name[alpha]]]
variable[cos_alpha] assign[=] call[name[numpy].cos, parameter[name[alpha]]]
if compare[name[mode] equal[==] constant[classical]] begin[:]
variable[a] assign[=] name[cos_alpha]
variable[b] assign[=] name[sin_alpha]
variable[reverse_seam] assign[=] compare[binary_operation[name[num_twists] <ast.Mod object at 0x7da2590d6920> constant[2]] equal[==] constant[1]]
variable[nodes] assign[=] binary_operation[name[scale] * call[call[name[numpy].array, parameter[list[[<ast.BinOp object at 0x7da18f58cdf0>, <ast.BinOp object at 0x7da18f58e9b0>, <ast.BinOp object at 0x7da18f58d7e0>]]]].reshape, parameter[constant[3], <ast.UnaryOp object at 0x7da18f58ccd0>]].T]
variable[elems] assign[=] call[name[_create_elements], parameter[name[nl], name[nw], name[reverse_seam]]]
return[tuple[[<ast.Name object at 0x7da18f58fb20>, <ast.Name object at 0x7da18f58db40>]]] | keyword[def] identifier[moebius] (
identifier[num_twists] = literal[int] ,
identifier[nl] = literal[int] ,
identifier[nw] = literal[int] ,
identifier[mode] = literal[string] ,
):
literal[string]
identifier[width] = literal[int]
identifier[scale] = literal[int]
identifier[r] = literal[int]
identifier[alpha0] = literal[int]
identifier[flatness] = literal[int]
identifier[u_range] = identifier[numpy] . identifier[linspace] ( literal[int] , literal[int] * identifier[numpy] . identifier[pi] , identifier[num] = identifier[nl] , identifier[endpoint] = keyword[False] )
identifier[v_range] = identifier[numpy] . identifier[linspace] (- literal[int] * identifier[width] , literal[int] * identifier[width] , identifier[num] = identifier[nw] )
identifier[sin_u] = identifier[numpy] . identifier[sin] ( identifier[u_range] )
identifier[cos_u] = identifier[numpy] . identifier[cos] ( identifier[u_range] )
identifier[alpha] = identifier[num_twists] * literal[int] * identifier[u_range] + identifier[alpha0]
identifier[sin_alpha] = identifier[numpy] . identifier[sin] ( identifier[alpha] )
identifier[cos_alpha] = identifier[numpy] . identifier[cos] ( identifier[alpha] )
keyword[if] identifier[mode] == literal[string] :
identifier[a] = identifier[cos_alpha]
identifier[b] = identifier[sin_alpha]
identifier[reverse_seam] = identifier[num_twists] % literal[int] == literal[int]
keyword[elif] identifier[mode] == literal[string] :
identifier[a] = identifier[numpy] . identifier[copysign] ( identifier[cos_alpha] ** literal[int] , identifier[cos_alpha] )
identifier[b] = identifier[numpy] . identifier[copysign] ( identifier[sin_alpha] ** literal[int] , identifier[sin_alpha] )
identifier[reverse_seam] = identifier[num_twists] % literal[int] == literal[int]
keyword[else] :
keyword[assert] identifier[mode] == literal[string]
identifier[a] = identifier[cos_alpha] ** literal[int]
identifier[b] = identifier[sin_alpha] ** literal[int]
identifier[reverse_seam] = keyword[False]
identifier[nodes] =(
identifier[scale]
* identifier[numpy] . identifier[array] (
[
identifier[numpy] . identifier[outer] ( identifier[a] * identifier[cos_u] , identifier[v_range] )+ identifier[r] * identifier[cos_u] [:, identifier[numpy] . identifier[newaxis] ],
identifier[numpy] . identifier[outer] ( identifier[a] * identifier[sin_u] , identifier[v_range] )+ identifier[r] * identifier[sin_u] [:, identifier[numpy] . identifier[newaxis] ],
identifier[numpy] . identifier[outer] ( identifier[b] , identifier[v_range] )* identifier[flatness] ,
]
)
. identifier[reshape] ( literal[int] ,- literal[int] )
. identifier[T]
)
identifier[elems] = identifier[_create_elements] ( identifier[nl] , identifier[nw] , identifier[reverse_seam] )
keyword[return] identifier[nodes] , identifier[elems] | def moebius(num_twists=1, nl=60, nw=11, mode='classical'): # How many twists are there in the 'paper'?
# Number of nodes along the length of the strip
# Number of nodes along the width of the strip (>= 2)
"Creates a simplistic triangular mesh on a slightly Möbius strip. The\n Möbius strip here deviates slightly from the ordinary geometry in that it\n is constructed in such a way that the two halves can be exchanged as to\n allow better comparison with the pseudo-Möbius geometry.\n\n The mode is either `'classical'` or `'smooth'`. The first is the classical\n Möbius band parametrization, the latter a smoothed variant matching\n `'pseudo'`.\n "
# The width of the strip
width = 1.0
scale = 10.0
# radius of the strip when flattened out
r = 1.0
# seam displacement
alpha0 = 0.0 # pi / 2
# How flat the strip will be.
# Positive values result in left-turning Möbius strips, negative in
# right-turning ones.
# Also influences the width of the strip.
flatness = 1.0
# Generate suitable ranges for parametrization
u_range = numpy.linspace(0.0, 2 * numpy.pi, num=nl, endpoint=False)
v_range = numpy.linspace(-0.5 * width, 0.5 * width, num=nw)
# Create the vertices. This is based on the parameterization
# of the Möbius strip as given in
# <http://en.wikipedia.org/wiki/M%C3%B6bius_strip#Geometry_and_topology>
sin_u = numpy.sin(u_range)
cos_u = numpy.cos(u_range)
alpha = num_twists * 0.5 * u_range + alpha0
sin_alpha = numpy.sin(alpha)
cos_alpha = numpy.cos(alpha)
if mode == 'classical':
a = cos_alpha
b = sin_alpha
reverse_seam = num_twists % 2 == 1 # depends on [control=['if'], data=[]]
elif mode == 'smooth':
# The fundamental difference with the ordinary Möbius band here are the
# squares.
# It is also possible to to abs() the respective sines and cosines, but
# this results in a non-smooth manifold.
a = numpy.copysign(cos_alpha ** 2, cos_alpha)
b = numpy.copysign(sin_alpha ** 2, sin_alpha)
reverse_seam = num_twists % 2 == 1 # depends on [control=['if'], data=[]]
else:
assert mode == 'pseudo'
a = cos_alpha ** 2
b = sin_alpha ** 2
reverse_seam = False
nodes = scale * numpy.array([numpy.outer(a * cos_u, v_range) + r * cos_u[:, numpy.newaxis], numpy.outer(a * sin_u, v_range) + r * sin_u[:, numpy.newaxis], numpy.outer(b, v_range) * flatness]).reshape(3, -1).T
elems = _create_elements(nl, nw, reverse_seam)
return (nodes, elems) |
def infer_doy_max(arr):
"""Return the largest doy allowed by calendar.
Parameters
----------
arr : xarray.DataArray
Array with `time` coordinate.
Returns
-------
int
The largest day of the year found in calendar.
"""
cal = arr.time.encoding.get('calendar', None)
if cal in calendars:
doy_max = calendars[cal]
else:
# If source is an array with no calendar information and whose length is not at least of full year,
# then this inference could be wrong (
doy_max = arr.time.dt.dayofyear.max().data
if len(arr.time) < 360:
raise ValueError("Cannot infer the calendar from a series less than a year long.")
if doy_max not in [360, 365, 366]:
raise ValueError("The target array's calendar is not recognized")
return doy_max | def function[infer_doy_max, parameter[arr]]:
constant[Return the largest doy allowed by calendar.
Parameters
----------
arr : xarray.DataArray
Array with `time` coordinate.
Returns
-------
int
The largest day of the year found in calendar.
]
variable[cal] assign[=] call[name[arr].time.encoding.get, parameter[constant[calendar], constant[None]]]
if compare[name[cal] in name[calendars]] begin[:]
variable[doy_max] assign[=] call[name[calendars]][name[cal]]
return[name[doy_max]] | keyword[def] identifier[infer_doy_max] ( identifier[arr] ):
literal[string]
identifier[cal] = identifier[arr] . identifier[time] . identifier[encoding] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[cal] keyword[in] identifier[calendars] :
identifier[doy_max] = identifier[calendars] [ identifier[cal] ]
keyword[else] :
identifier[doy_max] = identifier[arr] . identifier[time] . identifier[dt] . identifier[dayofyear] . identifier[max] (). identifier[data]
keyword[if] identifier[len] ( identifier[arr] . identifier[time] )< literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[doy_max] keyword[not] keyword[in] [ literal[int] , literal[int] , literal[int] ]:
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[doy_max] | def infer_doy_max(arr):
"""Return the largest doy allowed by calendar.
Parameters
----------
arr : xarray.DataArray
Array with `time` coordinate.
Returns
-------
int
The largest day of the year found in calendar.
"""
cal = arr.time.encoding.get('calendar', None)
if cal in calendars:
doy_max = calendars[cal] # depends on [control=['if'], data=['cal', 'calendars']]
else:
# If source is an array with no calendar information and whose length is not at least of full year,
# then this inference could be wrong (
doy_max = arr.time.dt.dayofyear.max().data
if len(arr.time) < 360:
raise ValueError('Cannot infer the calendar from a series less than a year long.') # depends on [control=['if'], data=[]]
if doy_max not in [360, 365, 366]:
raise ValueError("The target array's calendar is not recognized") # depends on [control=['if'], data=[]]
return doy_max |
def authenticate(name, remote_addr, password, cert, key, verify_cert=True):
'''
Authenticate with a remote peer.
.. notes:
This function makes every time you run this a connection
to remote_addr, you better call this only once.
remote_addr :
An URL to a remote Server, you also have to give cert and key if you
provide remote_addr!
Examples:
https://myserver.lan:8443
/var/lib/mysocket.sock
password :
The PaSsW0rD
cert :
PEM Formatted SSL Zertifikate.
Examples:
/root/.config/lxc/client.crt
key :
PEM Formatted SSL Key.
Examples:
/root/.config/lxc/client.key
verify_cert : True
Wherever to verify the cert, this is by default True
but in the most cases you want to set it off as LXD
normaly uses self-signed certificates.
name:
Ignore this. This is just here for salt.
'''
ret = {
'name': name,
'remote_addr': remote_addr,
'cert': cert,
'key': key,
'verify_cert': verify_cert
}
try:
client = __salt__['lxd.pylxd_client_get'](
remote_addr, cert, key, verify_cert
)
except SaltInvocationError as e:
return _error(ret, six.text_type(e))
except CommandExecutionError as e:
return _error(ret, six.text_type(e))
if client.trusted:
return _success(ret, "Already authenticated.")
try:
result = __salt__['lxd.authenticate'](
remote_addr, password, cert, key, verify_cert
)
except CommandExecutionError as e:
return _error(ret, six.text_type(e))
if result is not True:
return _error(
ret,
"Failed to authenticate with peer: {0}".format(remote_addr)
)
msg = "Successfully authenticated with peer: {0}".format(remote_addr)
ret['changes'] = msg
return _success(
ret,
msg
) | def function[authenticate, parameter[name, remote_addr, password, cert, key, verify_cert]]:
constant[
Authenticate with a remote peer.
.. notes:
This function makes every time you run this a connection
to remote_addr, you better call this only once.
remote_addr :
An URL to a remote Server, you also have to give cert and key if you
provide remote_addr!
Examples:
https://myserver.lan:8443
/var/lib/mysocket.sock
password :
The PaSsW0rD
cert :
PEM Formatted SSL Zertifikate.
Examples:
/root/.config/lxc/client.crt
key :
PEM Formatted SSL Key.
Examples:
/root/.config/lxc/client.key
verify_cert : True
Wherever to verify the cert, this is by default True
but in the most cases you want to set it off as LXD
normaly uses self-signed certificates.
name:
Ignore this. This is just here for salt.
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b21f8430>, <ast.Constant object at 0x7da1b21f9840>, <ast.Constant object at 0x7da1b21fb910>, <ast.Constant object at 0x7da1b21fa0b0>, <ast.Constant object at 0x7da1b21fa1d0>], [<ast.Name object at 0x7da1b21f9750>, <ast.Name object at 0x7da1b21f8df0>, <ast.Name object at 0x7da1b21fa350>, <ast.Name object at 0x7da1b21f8a60>, <ast.Name object at 0x7da1b21f8e20>]]
<ast.Try object at 0x7da1b21f95d0>
if name[client].trusted begin[:]
return[call[name[_success], parameter[name[ret], constant[Already authenticated.]]]]
<ast.Try object at 0x7da1b208df00>
if compare[name[result] is_not constant[True]] begin[:]
return[call[name[_error], parameter[name[ret], call[constant[Failed to authenticate with peer: {0}].format, parameter[name[remote_addr]]]]]]
variable[msg] assign[=] call[constant[Successfully authenticated with peer: {0}].format, parameter[name[remote_addr]]]
call[name[ret]][constant[changes]] assign[=] name[msg]
return[call[name[_success], parameter[name[ret], name[msg]]]] | keyword[def] identifier[authenticate] ( identifier[name] , identifier[remote_addr] , identifier[password] , identifier[cert] , identifier[key] , identifier[verify_cert] = keyword[True] ):
literal[string]
identifier[ret] ={
literal[string] : identifier[name] ,
literal[string] : identifier[remote_addr] ,
literal[string] : identifier[cert] ,
literal[string] : identifier[key] ,
literal[string] : identifier[verify_cert]
}
keyword[try] :
identifier[client] = identifier[__salt__] [ literal[string] ](
identifier[remote_addr] , identifier[cert] , identifier[key] , identifier[verify_cert]
)
keyword[except] identifier[SaltInvocationError] keyword[as] identifier[e] :
keyword[return] identifier[_error] ( identifier[ret] , identifier[six] . identifier[text_type] ( identifier[e] ))
keyword[except] identifier[CommandExecutionError] keyword[as] identifier[e] :
keyword[return] identifier[_error] ( identifier[ret] , identifier[six] . identifier[text_type] ( identifier[e] ))
keyword[if] identifier[client] . identifier[trusted] :
keyword[return] identifier[_success] ( identifier[ret] , literal[string] )
keyword[try] :
identifier[result] = identifier[__salt__] [ literal[string] ](
identifier[remote_addr] , identifier[password] , identifier[cert] , identifier[key] , identifier[verify_cert]
)
keyword[except] identifier[CommandExecutionError] keyword[as] identifier[e] :
keyword[return] identifier[_error] ( identifier[ret] , identifier[six] . identifier[text_type] ( identifier[e] ))
keyword[if] identifier[result] keyword[is] keyword[not] keyword[True] :
keyword[return] identifier[_error] (
identifier[ret] ,
literal[string] . identifier[format] ( identifier[remote_addr] )
)
identifier[msg] = literal[string] . identifier[format] ( identifier[remote_addr] )
identifier[ret] [ literal[string] ]= identifier[msg]
keyword[return] identifier[_success] (
identifier[ret] ,
identifier[msg]
) | def authenticate(name, remote_addr, password, cert, key, verify_cert=True):
"""
Authenticate with a remote peer.
.. notes:
This function makes every time you run this a connection
to remote_addr, you better call this only once.
remote_addr :
An URL to a remote Server, you also have to give cert and key if you
provide remote_addr!
Examples:
https://myserver.lan:8443
/var/lib/mysocket.sock
password :
The PaSsW0rD
cert :
PEM Formatted SSL Zertifikate.
Examples:
/root/.config/lxc/client.crt
key :
PEM Formatted SSL Key.
Examples:
/root/.config/lxc/client.key
verify_cert : True
Wherever to verify the cert, this is by default True
but in the most cases you want to set it off as LXD
normaly uses self-signed certificates.
name:
Ignore this. This is just here for salt.
"""
ret = {'name': name, 'remote_addr': remote_addr, 'cert': cert, 'key': key, 'verify_cert': verify_cert}
try:
client = __salt__['lxd.pylxd_client_get'](remote_addr, cert, key, verify_cert) # depends on [control=['try'], data=[]]
except SaltInvocationError as e:
return _error(ret, six.text_type(e)) # depends on [control=['except'], data=['e']]
except CommandExecutionError as e:
return _error(ret, six.text_type(e)) # depends on [control=['except'], data=['e']]
if client.trusted:
return _success(ret, 'Already authenticated.') # depends on [control=['if'], data=[]]
try:
result = __salt__['lxd.authenticate'](remote_addr, password, cert, key, verify_cert) # depends on [control=['try'], data=[]]
except CommandExecutionError as e:
return _error(ret, six.text_type(e)) # depends on [control=['except'], data=['e']]
if result is not True:
return _error(ret, 'Failed to authenticate with peer: {0}'.format(remote_addr)) # depends on [control=['if'], data=[]]
msg = 'Successfully authenticated with peer: {0}'.format(remote_addr)
ret['changes'] = msg
return _success(ret, msg) |
def peek_step(self, val: ArrayValue,
sn: "DataNode") -> Tuple[Value, "DataNode"]:
"""Return entry value addressed by the receiver + its schema node.
Args:
val: Current value (array).
sn: Current schema node.
"""
try:
return (val[val.index(self.parse_value(sn))], sn)
except ValueError:
return None, sn | def function[peek_step, parameter[self, val, sn]]:
constant[Return entry value addressed by the receiver + its schema node.
Args:
val: Current value (array).
sn: Current schema node.
]
<ast.Try object at 0x7da1b052a950> | keyword[def] identifier[peek_step] ( identifier[self] , identifier[val] : identifier[ArrayValue] ,
identifier[sn] : literal[string] )-> identifier[Tuple] [ identifier[Value] , literal[string] ]:
literal[string]
keyword[try] :
keyword[return] ( identifier[val] [ identifier[val] . identifier[index] ( identifier[self] . identifier[parse_value] ( identifier[sn] ))], identifier[sn] )
keyword[except] identifier[ValueError] :
keyword[return] keyword[None] , identifier[sn] | def peek_step(self, val: ArrayValue, sn: 'DataNode') -> Tuple[Value, 'DataNode']:
"""Return entry value addressed by the receiver + its schema node.
Args:
val: Current value (array).
sn: Current schema node.
"""
try:
return (val[val.index(self.parse_value(sn))], sn) # depends on [control=['try'], data=[]]
except ValueError:
return (None, sn) # depends on [control=['except'], data=[]] |
def send_error(self, code, message=None):
"""Send and log an error reply.
Arguments are the error code, and a detailed message.
The detailed message defaults to the short entry matching the
response code.
This sends an error response (so it must be called before any
output has been generated), logs the error, and finally sends
a piece of HTML explaining the error to the user.
"""
try:
shortmsg, longmsg = self.responses[code]
except KeyError:
shortmsg, longmsg = '???', '???'
if message is None:
message = shortmsg
explain = longmsg
self.log_error("code %d, message %s", code, message)
# using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201)
content = (self.error_message_format %
{'code': code, 'message': _quote_html(message), 'explain': explain})
self.send_response(code, message)
self.send_header("Content-Type", self.error_content_type)
self.send_header('Connection', 'close')
self.end_headers()
if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
self.wfile.write(content.encode('UTF-8', 'replace')) | def function[send_error, parameter[self, code, message]]:
constant[Send and log an error reply.
Arguments are the error code, and a detailed message.
The detailed message defaults to the short entry matching the
response code.
This sends an error response (so it must be called before any
output has been generated), logs the error, and finally sends
a piece of HTML explaining the error to the user.
]
<ast.Try object at 0x7da18f00d060>
if compare[name[message] is constant[None]] begin[:]
variable[message] assign[=] name[shortmsg]
variable[explain] assign[=] name[longmsg]
call[name[self].log_error, parameter[constant[code %d, message %s], name[code], name[message]]]
variable[content] assign[=] binary_operation[name[self].error_message_format <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da18dc98160>, <ast.Constant object at 0x7da18dc9ac80>, <ast.Constant object at 0x7da18dc9b520>], [<ast.Name object at 0x7da18dc98c40>, <ast.Call object at 0x7da18dc9aec0>, <ast.Name object at 0x7da18dc99540>]]]
call[name[self].send_response, parameter[name[code], name[message]]]
call[name[self].send_header, parameter[constant[Content-Type], name[self].error_content_type]]
call[name[self].send_header, parameter[constant[Connection], constant[close]]]
call[name[self].end_headers, parameter[]]
if <ast.BoolOp object at 0x7da18dc99240> begin[:]
call[name[self].wfile.write, parameter[call[name[content].encode, parameter[constant[UTF-8], constant[replace]]]]] | keyword[def] identifier[send_error] ( identifier[self] , identifier[code] , identifier[message] = keyword[None] ):
literal[string]
keyword[try] :
identifier[shortmsg] , identifier[longmsg] = identifier[self] . identifier[responses] [ identifier[code] ]
keyword[except] identifier[KeyError] :
identifier[shortmsg] , identifier[longmsg] = literal[string] , literal[string]
keyword[if] identifier[message] keyword[is] keyword[None] :
identifier[message] = identifier[shortmsg]
identifier[explain] = identifier[longmsg]
identifier[self] . identifier[log_error] ( literal[string] , identifier[code] , identifier[message] )
identifier[content] =( identifier[self] . identifier[error_message_format] %
{ literal[string] : identifier[code] , literal[string] : identifier[_quote_html] ( identifier[message] ), literal[string] : identifier[explain] })
identifier[self] . identifier[send_response] ( identifier[code] , identifier[message] )
identifier[self] . identifier[send_header] ( literal[string] , identifier[self] . identifier[error_content_type] )
identifier[self] . identifier[send_header] ( literal[string] , literal[string] )
identifier[self] . identifier[end_headers] ()
keyword[if] identifier[self] . identifier[command] != literal[string] keyword[and] identifier[code] >= literal[int] keyword[and] identifier[code] keyword[not] keyword[in] ( literal[int] , literal[int] ):
identifier[self] . identifier[wfile] . identifier[write] ( identifier[content] . identifier[encode] ( literal[string] , literal[string] )) | def send_error(self, code, message=None):
"""Send and log an error reply.
Arguments are the error code, and a detailed message.
The detailed message defaults to the short entry matching the
response code.
This sends an error response (so it must be called before any
output has been generated), logs the error, and finally sends
a piece of HTML explaining the error to the user.
"""
try:
(shortmsg, longmsg) = self.responses[code] # depends on [control=['try'], data=[]]
except KeyError:
(shortmsg, longmsg) = ('???', '???') # depends on [control=['except'], data=[]]
if message is None:
message = shortmsg # depends on [control=['if'], data=['message']]
explain = longmsg
self.log_error('code %d, message %s', code, message)
# using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201)
content = self.error_message_format % {'code': code, 'message': _quote_html(message), 'explain': explain}
self.send_response(code, message)
self.send_header('Content-Type', self.error_content_type)
self.send_header('Connection', 'close')
self.end_headers()
if self.command != 'HEAD' and code >= 200 and (code not in (204, 304)):
self.wfile.write(content.encode('UTF-8', 'replace')) # depends on [control=['if'], data=[]] |
def run_decider_state(self, decider_state, child_errors, final_outcomes_dict):
""" Runs the decider state of the barrier concurrency state. The decider state decides on which outcome the
barrier concurrency is left.
:param decider_state: the decider state of the barrier concurrency state
:param child_errors: error of the concurrent branches
:param final_outcomes_dict: dictionary of all outcomes of the concurrent branches
:return:
"""
decider_state.state_execution_status = StateExecutionStatus.ACTIVE
# forward the decider specific data
decider_state.child_errors = child_errors
decider_state.final_outcomes_dict = final_outcomes_dict
# standard state execution
decider_state.input_data = self.get_inputs_for_state(decider_state)
decider_state.output_data = self.create_output_dictionary_for_state(decider_state)
decider_state.start(self.execution_history, backward_execution=False)
decider_state.join()
decider_state_error = None
if decider_state.final_outcome.outcome_id == -1:
if 'error' in decider_state.output_data:
decider_state_error = decider_state.output_data['error']
# standard output data processing
self.add_state_execution_output_to_scoped_data(decider_state.output_data, decider_state)
self.update_scoped_variables_with_output_dictionary(decider_state.output_data, decider_state)
return decider_state_error | def function[run_decider_state, parameter[self, decider_state, child_errors, final_outcomes_dict]]:
constant[ Runs the decider state of the barrier concurrency state. The decider state decides on which outcome the
barrier concurrency is left.
:param decider_state: the decider state of the barrier concurrency state
:param child_errors: error of the concurrent branches
:param final_outcomes_dict: dictionary of all outcomes of the concurrent branches
:return:
]
name[decider_state].state_execution_status assign[=] name[StateExecutionStatus].ACTIVE
name[decider_state].child_errors assign[=] name[child_errors]
name[decider_state].final_outcomes_dict assign[=] name[final_outcomes_dict]
name[decider_state].input_data assign[=] call[name[self].get_inputs_for_state, parameter[name[decider_state]]]
name[decider_state].output_data assign[=] call[name[self].create_output_dictionary_for_state, parameter[name[decider_state]]]
call[name[decider_state].start, parameter[name[self].execution_history]]
call[name[decider_state].join, parameter[]]
variable[decider_state_error] assign[=] constant[None]
if compare[name[decider_state].final_outcome.outcome_id equal[==] <ast.UnaryOp object at 0x7da1b1b5ac50>] begin[:]
if compare[constant[error] in name[decider_state].output_data] begin[:]
variable[decider_state_error] assign[=] call[name[decider_state].output_data][constant[error]]
call[name[self].add_state_execution_output_to_scoped_data, parameter[name[decider_state].output_data, name[decider_state]]]
call[name[self].update_scoped_variables_with_output_dictionary, parameter[name[decider_state].output_data, name[decider_state]]]
return[name[decider_state_error]] | keyword[def] identifier[run_decider_state] ( identifier[self] , identifier[decider_state] , identifier[child_errors] , identifier[final_outcomes_dict] ):
literal[string]
identifier[decider_state] . identifier[state_execution_status] = identifier[StateExecutionStatus] . identifier[ACTIVE]
identifier[decider_state] . identifier[child_errors] = identifier[child_errors]
identifier[decider_state] . identifier[final_outcomes_dict] = identifier[final_outcomes_dict]
identifier[decider_state] . identifier[input_data] = identifier[self] . identifier[get_inputs_for_state] ( identifier[decider_state] )
identifier[decider_state] . identifier[output_data] = identifier[self] . identifier[create_output_dictionary_for_state] ( identifier[decider_state] )
identifier[decider_state] . identifier[start] ( identifier[self] . identifier[execution_history] , identifier[backward_execution] = keyword[False] )
identifier[decider_state] . identifier[join] ()
identifier[decider_state_error] = keyword[None]
keyword[if] identifier[decider_state] . identifier[final_outcome] . identifier[outcome_id] ==- literal[int] :
keyword[if] literal[string] keyword[in] identifier[decider_state] . identifier[output_data] :
identifier[decider_state_error] = identifier[decider_state] . identifier[output_data] [ literal[string] ]
identifier[self] . identifier[add_state_execution_output_to_scoped_data] ( identifier[decider_state] . identifier[output_data] , identifier[decider_state] )
identifier[self] . identifier[update_scoped_variables_with_output_dictionary] ( identifier[decider_state] . identifier[output_data] , identifier[decider_state] )
keyword[return] identifier[decider_state_error] | def run_decider_state(self, decider_state, child_errors, final_outcomes_dict):
""" Runs the decider state of the barrier concurrency state. The decider state decides on which outcome the
barrier concurrency is left.
:param decider_state: the decider state of the barrier concurrency state
:param child_errors: error of the concurrent branches
:param final_outcomes_dict: dictionary of all outcomes of the concurrent branches
:return:
"""
decider_state.state_execution_status = StateExecutionStatus.ACTIVE
# forward the decider specific data
decider_state.child_errors = child_errors
decider_state.final_outcomes_dict = final_outcomes_dict
# standard state execution
decider_state.input_data = self.get_inputs_for_state(decider_state)
decider_state.output_data = self.create_output_dictionary_for_state(decider_state)
decider_state.start(self.execution_history, backward_execution=False)
decider_state.join()
decider_state_error = None
if decider_state.final_outcome.outcome_id == -1:
if 'error' in decider_state.output_data:
decider_state_error = decider_state.output_data['error'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# standard output data processing
self.add_state_execution_output_to_scoped_data(decider_state.output_data, decider_state)
self.update_scoped_variables_with_output_dictionary(decider_state.output_data, decider_state)
return decider_state_error |
def _tee_output(make_proc, stdout=None, stderr=None, backend='auto'):
"""
Simultaneously reports and captures stdout and stderr from a process
subprocess must be created using (stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
"""
logged_out = []
logged_err = []
if backend == 'auto':
# backend = 'select' if POSIX else 'thread'
backend = 'thread'
if backend == 'select':
if not POSIX: # nocover
raise NotImplementedError('select is only available on posix')
# the select-based version is stable, but slow
_proc_iteroutput = _proc_iteroutput_select
elif backend == 'thread':
# the thread version is fast, but might run into issues.
_proc_iteroutput = _proc_iteroutput_thread
else:
raise ValueError('backend must be select, thread, or auto')
proc = make_proc()
for oline, eline in _proc_iteroutput(proc):
if oline:
if stdout: # pragma: nobranch
stdout.write(oline)
stdout.flush()
logged_out.append(oline)
if eline:
if stderr: # pragma: nobranch
stderr.write(eline)
stderr.flush()
logged_err.append(eline)
return proc, logged_out, logged_err | def function[_tee_output, parameter[make_proc, stdout, stderr, backend]]:
constant[
Simultaneously reports and captures stdout and stderr from a process
subprocess must be created using (stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
]
variable[logged_out] assign[=] list[[]]
variable[logged_err] assign[=] list[[]]
if compare[name[backend] equal[==] constant[auto]] begin[:]
variable[backend] assign[=] constant[thread]
if compare[name[backend] equal[==] constant[select]] begin[:]
if <ast.UnaryOp object at 0x7da1b020df30> begin[:]
<ast.Raise object at 0x7da1b020e8c0>
variable[_proc_iteroutput] assign[=] name[_proc_iteroutput_select]
variable[proc] assign[=] call[name[make_proc], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b020c640>, <ast.Name object at 0x7da1b020c0a0>]]] in starred[call[name[_proc_iteroutput], parameter[name[proc]]]] begin[:]
if name[oline] begin[:]
if name[stdout] begin[:]
call[name[stdout].write, parameter[name[oline]]]
call[name[stdout].flush, parameter[]]
call[name[logged_out].append, parameter[name[oline]]]
if name[eline] begin[:]
if name[stderr] begin[:]
call[name[stderr].write, parameter[name[eline]]]
call[name[stderr].flush, parameter[]]
call[name[logged_err].append, parameter[name[eline]]]
return[tuple[[<ast.Name object at 0x7da1b020e830>, <ast.Name object at 0x7da1b020cdf0>, <ast.Name object at 0x7da1b020e950>]]] | keyword[def] identifier[_tee_output] ( identifier[make_proc] , identifier[stdout] = keyword[None] , identifier[stderr] = keyword[None] , identifier[backend] = literal[string] ):
literal[string]
identifier[logged_out] =[]
identifier[logged_err] =[]
keyword[if] identifier[backend] == literal[string] :
identifier[backend] = literal[string]
keyword[if] identifier[backend] == literal[string] :
keyword[if] keyword[not] identifier[POSIX] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
identifier[_proc_iteroutput] = identifier[_proc_iteroutput_select]
keyword[elif] identifier[backend] == literal[string] :
identifier[_proc_iteroutput] = identifier[_proc_iteroutput_thread]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[proc] = identifier[make_proc] ()
keyword[for] identifier[oline] , identifier[eline] keyword[in] identifier[_proc_iteroutput] ( identifier[proc] ):
keyword[if] identifier[oline] :
keyword[if] identifier[stdout] :
identifier[stdout] . identifier[write] ( identifier[oline] )
identifier[stdout] . identifier[flush] ()
identifier[logged_out] . identifier[append] ( identifier[oline] )
keyword[if] identifier[eline] :
keyword[if] identifier[stderr] :
identifier[stderr] . identifier[write] ( identifier[eline] )
identifier[stderr] . identifier[flush] ()
identifier[logged_err] . identifier[append] ( identifier[eline] )
keyword[return] identifier[proc] , identifier[logged_out] , identifier[logged_err] | def _tee_output(make_proc, stdout=None, stderr=None, backend='auto'):
"""
Simultaneously reports and captures stdout and stderr from a process
subprocess must be created using (stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
"""
logged_out = []
logged_err = []
if backend == 'auto':
# backend = 'select' if POSIX else 'thread'
backend = 'thread' # depends on [control=['if'], data=['backend']]
if backend == 'select':
if not POSIX: # nocover
raise NotImplementedError('select is only available on posix') # depends on [control=['if'], data=[]]
# the select-based version is stable, but slow
_proc_iteroutput = _proc_iteroutput_select # depends on [control=['if'], data=[]]
elif backend == 'thread':
# the thread version is fast, but might run into issues.
_proc_iteroutput = _proc_iteroutput_thread # depends on [control=['if'], data=[]]
else:
raise ValueError('backend must be select, thread, or auto')
proc = make_proc()
for (oline, eline) in _proc_iteroutput(proc):
if oline:
if stdout: # pragma: nobranch
stdout.write(oline)
stdout.flush() # depends on [control=['if'], data=[]]
logged_out.append(oline) # depends on [control=['if'], data=[]]
if eline:
if stderr: # pragma: nobranch
stderr.write(eline)
stderr.flush() # depends on [control=['if'], data=[]]
logged_err.append(eline) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return (proc, logged_out, logged_err) |
def render_reply(self):
"""
Create a :class:`~.Data` object equal to the object from which the from
was created through :meth:`from_xso`, except that the values of the
fields are exchanged with the values set on the form.
Fields which have no corresponding form descriptor are left untouched.
Fields which are accessible through form descriptors, but are not in
the original :class:`~.Data` are not included in the output.
This method only works on forms created through :meth:`from_xso`.
The resulting :class:`~.Data` instance has the :attr:`~.Data.type_` set
to :attr:`~.DataType.SUBMIT`.
"""
data = copy.copy(self._recv_xso)
data.type_ = forms_xso.DataType.SUBMIT
data.fields = list(self._recv_xso.fields)
for i, field_xso in enumerate(data.fields):
if field_xso.var is None:
continue
if field_xso.var == "FORM_TYPE":
continue
key = fields.descriptor_ns, field_xso.var
try:
descriptor = self.DESCRIPTOR_MAP[key]
except KeyError:
continue
bound_field = descriptor.__get__(self, type(self))
data.fields[i] = bound_field.render(
use_local_metadata=False
)
return data | def function[render_reply, parameter[self]]:
constant[
Create a :class:`~.Data` object equal to the object from which the from
was created through :meth:`from_xso`, except that the values of the
fields are exchanged with the values set on the form.
Fields which have no corresponding form descriptor are left untouched.
Fields which are accessible through form descriptors, but are not in
the original :class:`~.Data` are not included in the output.
This method only works on forms created through :meth:`from_xso`.
The resulting :class:`~.Data` instance has the :attr:`~.Data.type_` set
to :attr:`~.DataType.SUBMIT`.
]
variable[data] assign[=] call[name[copy].copy, parameter[name[self]._recv_xso]]
name[data].type_ assign[=] name[forms_xso].DataType.SUBMIT
name[data].fields assign[=] call[name[list], parameter[name[self]._recv_xso.fields]]
for taget[tuple[[<ast.Name object at 0x7da20c6aa200>, <ast.Name object at 0x7da20c6aaec0>]]] in starred[call[name[enumerate], parameter[name[data].fields]]] begin[:]
if compare[name[field_xso].var is constant[None]] begin[:]
continue
if compare[name[field_xso].var equal[==] constant[FORM_TYPE]] begin[:]
continue
variable[key] assign[=] tuple[[<ast.Attribute object at 0x7da20c795960>, <ast.Attribute object at 0x7da20c796530>]]
<ast.Try object at 0x7da20c795c90>
variable[bound_field] assign[=] call[name[descriptor].__get__, parameter[name[self], call[name[type], parameter[name[self]]]]]
call[name[data].fields][name[i]] assign[=] call[name[bound_field].render, parameter[]]
return[name[data]] | keyword[def] identifier[render_reply] ( identifier[self] ):
literal[string]
identifier[data] = identifier[copy] . identifier[copy] ( identifier[self] . identifier[_recv_xso] )
identifier[data] . identifier[type_] = identifier[forms_xso] . identifier[DataType] . identifier[SUBMIT]
identifier[data] . identifier[fields] = identifier[list] ( identifier[self] . identifier[_recv_xso] . identifier[fields] )
keyword[for] identifier[i] , identifier[field_xso] keyword[in] identifier[enumerate] ( identifier[data] . identifier[fields] ):
keyword[if] identifier[field_xso] . identifier[var] keyword[is] keyword[None] :
keyword[continue]
keyword[if] identifier[field_xso] . identifier[var] == literal[string] :
keyword[continue]
identifier[key] = identifier[fields] . identifier[descriptor_ns] , identifier[field_xso] . identifier[var]
keyword[try] :
identifier[descriptor] = identifier[self] . identifier[DESCRIPTOR_MAP] [ identifier[key] ]
keyword[except] identifier[KeyError] :
keyword[continue]
identifier[bound_field] = identifier[descriptor] . identifier[__get__] ( identifier[self] , identifier[type] ( identifier[self] ))
identifier[data] . identifier[fields] [ identifier[i] ]= identifier[bound_field] . identifier[render] (
identifier[use_local_metadata] = keyword[False]
)
keyword[return] identifier[data] | def render_reply(self):
"""
Create a :class:`~.Data` object equal to the object from which the from
was created through :meth:`from_xso`, except that the values of the
fields are exchanged with the values set on the form.
Fields which have no corresponding form descriptor are left untouched.
Fields which are accessible through form descriptors, but are not in
the original :class:`~.Data` are not included in the output.
This method only works on forms created through :meth:`from_xso`.
The resulting :class:`~.Data` instance has the :attr:`~.Data.type_` set
to :attr:`~.DataType.SUBMIT`.
"""
data = copy.copy(self._recv_xso)
data.type_ = forms_xso.DataType.SUBMIT
data.fields = list(self._recv_xso.fields)
for (i, field_xso) in enumerate(data.fields):
if field_xso.var is None:
continue # depends on [control=['if'], data=[]]
if field_xso.var == 'FORM_TYPE':
continue # depends on [control=['if'], data=[]]
key = (fields.descriptor_ns, field_xso.var)
try:
descriptor = self.DESCRIPTOR_MAP[key] # depends on [control=['try'], data=[]]
except KeyError:
continue # depends on [control=['except'], data=[]]
bound_field = descriptor.__get__(self, type(self))
data.fields[i] = bound_field.render(use_local_metadata=False) # depends on [control=['for'], data=[]]
return data |
def _set_defaults(self):
guide._set_defaults(self)
nbreak = len(self.key)
# rows and columns
if self.nrow is not None and self.ncol is not None:
if guide.nrow * guide.ncol < nbreak:
raise PlotnineError(
"nrow x ncol need to be larger",
"than the number of breaks")
if self.nrow is None and self.ncol is None:
if self.direction == 'horizontal':
self.nrow = int(np.ceil(nbreak/5))
else:
self.ncol = int(np.ceil(nbreak/20))
self.nrow = self.nrow or int(np.ceil(nbreak/self.ncol))
self.ncol = self.ncol or int(np.ceil(nbreak/self.nrow))
# key width and key height for each legend entry
#
# Take a peak into data['size'] to make sure the
# legend dimensions are big enough
"""
>>> gg = ggplot(diamonds, aes(x='cut', y='clarity'))
>>> gg = gg + stat_sum(aes(group='cut'))
>>> gg + scale_size(range=(3, 25))
Note the different height sizes for the entries
"""
# FIXME: This should be in the geom instead of having
# special case conditional branches
def determine_side_length(initial_size):
default_pad = initial_size * 0.5
# default_pad = 0
size = np.ones(nbreak) * initial_size
for i in range(nbreak):
for gl in self.glayers:
_size = 0
pad = default_pad
# Full size of object to appear in the
# legend key
with suppress(IndexError):
if 'size' in gl.data:
_size = gl.data['size'].iloc[i] * SIZE_FACTOR
if 'stroke' in gl.data:
_size += (2 * gl.data['stroke'].iloc[i] *
SIZE_FACTOR)
# special case, color does not apply to
# border/linewidth
if issubclass(gl.geom, geom_text):
pad = 0
if _size < initial_size:
continue
try:
# color(edgecolor) affects size(linewidth)
# When the edge is not visible, we should
# not expand the size of the keys
if gl.data['color'].iloc[i] is not None:
size[i] = np.max([_size+pad, size[i]])
except KeyError:
break
return size
# keysize
if self.keywidth is None:
width = determine_side_length(
self._default('legend_key_width', 18))
if self.direction == 'vertical':
width[:] = width.max()
self._keywidth = width
else:
self._keywidth = [self.keywidth]*nbreak
if self.keyheight is None:
height = determine_side_length(
self._default('legend_key_height', 18))
if self.direction == 'horizontal':
height[:] = height.max()
self._keyheight = height
else:
self._keyheight = [self.keyheight]*nbreak | def function[_set_defaults, parameter[self]]:
call[name[guide]._set_defaults, parameter[name[self]]]
variable[nbreak] assign[=] call[name[len], parameter[name[self].key]]
if <ast.BoolOp object at 0x7da18f723550> begin[:]
if compare[binary_operation[name[guide].nrow * name[guide].ncol] less[<] name[nbreak]] begin[:]
<ast.Raise object at 0x7da18f721a80>
if <ast.BoolOp object at 0x7da18f721ed0> begin[:]
if compare[name[self].direction equal[==] constant[horizontal]] begin[:]
name[self].nrow assign[=] call[name[int], parameter[call[name[np].ceil, parameter[binary_operation[name[nbreak] / constant[5]]]]]]
name[self].nrow assign[=] <ast.BoolOp object at 0x7da18f7221a0>
name[self].ncol assign[=] <ast.BoolOp object at 0x7da18f7234c0>
constant[
>>> gg = ggplot(diamonds, aes(x='cut', y='clarity'))
>>> gg = gg + stat_sum(aes(group='cut'))
>>> gg + scale_size(range=(3, 25))
Note the different height sizes for the entries
]
def function[determine_side_length, parameter[initial_size]]:
variable[default_pad] assign[=] binary_operation[name[initial_size] * constant[0.5]]
variable[size] assign[=] binary_operation[call[name[np].ones, parameter[name[nbreak]]] * name[initial_size]]
for taget[name[i]] in starred[call[name[range], parameter[name[nbreak]]]] begin[:]
for taget[name[gl]] in starred[name[self].glayers] begin[:]
variable[_size] assign[=] constant[0]
variable[pad] assign[=] name[default_pad]
with call[name[suppress], parameter[name[IndexError]]] begin[:]
if compare[constant[size] in name[gl].data] begin[:]
variable[_size] assign[=] binary_operation[call[call[name[gl].data][constant[size]].iloc][name[i]] * name[SIZE_FACTOR]]
if compare[constant[stroke] in name[gl].data] begin[:]
<ast.AugAssign object at 0x7da18f722950>
if call[name[issubclass], parameter[name[gl].geom, name[geom_text]]] begin[:]
variable[pad] assign[=] constant[0]
if compare[name[_size] less[<] name[initial_size]] begin[:]
continue
<ast.Try object at 0x7da18f7232b0>
return[name[size]]
if compare[name[self].keywidth is constant[None]] begin[:]
variable[width] assign[=] call[name[determine_side_length], parameter[call[name[self]._default, parameter[constant[legend_key_width], constant[18]]]]]
if compare[name[self].direction equal[==] constant[vertical]] begin[:]
call[name[width]][<ast.Slice object at 0x7da18f721210>] assign[=] call[name[width].max, parameter[]]
name[self]._keywidth assign[=] name[width]
if compare[name[self].keyheight is constant[None]] begin[:]
variable[height] assign[=] call[name[determine_side_length], parameter[call[name[self]._default, parameter[constant[legend_key_height], constant[18]]]]]
if compare[name[self].direction equal[==] constant[horizontal]] begin[:]
call[name[height]][<ast.Slice object at 0x7da18dc04190>] assign[=] call[name[height].max, parameter[]]
name[self]._keyheight assign[=] name[height] | keyword[def] identifier[_set_defaults] ( identifier[self] ):
identifier[guide] . identifier[_set_defaults] ( identifier[self] )
identifier[nbreak] = identifier[len] ( identifier[self] . identifier[key] )
keyword[if] identifier[self] . identifier[nrow] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[ncol] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[guide] . identifier[nrow] * identifier[guide] . identifier[ncol] < identifier[nbreak] :
keyword[raise] identifier[PlotnineError] (
literal[string] ,
literal[string] )
keyword[if] identifier[self] . identifier[nrow] keyword[is] keyword[None] keyword[and] identifier[self] . identifier[ncol] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[direction] == literal[string] :
identifier[self] . identifier[nrow] = identifier[int] ( identifier[np] . identifier[ceil] ( identifier[nbreak] / literal[int] ))
keyword[else] :
identifier[self] . identifier[ncol] = identifier[int] ( identifier[np] . identifier[ceil] ( identifier[nbreak] / literal[int] ))
identifier[self] . identifier[nrow] = identifier[self] . identifier[nrow] keyword[or] identifier[int] ( identifier[np] . identifier[ceil] ( identifier[nbreak] / identifier[self] . identifier[ncol] ))
identifier[self] . identifier[ncol] = identifier[self] . identifier[ncol] keyword[or] identifier[int] ( identifier[np] . identifier[ceil] ( identifier[nbreak] / identifier[self] . identifier[nrow] ))
literal[string]
keyword[def] identifier[determine_side_length] ( identifier[initial_size] ):
identifier[default_pad] = identifier[initial_size] * literal[int]
identifier[size] = identifier[np] . identifier[ones] ( identifier[nbreak] )* identifier[initial_size]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nbreak] ):
keyword[for] identifier[gl] keyword[in] identifier[self] . identifier[glayers] :
identifier[_size] = literal[int]
identifier[pad] = identifier[default_pad]
keyword[with] identifier[suppress] ( identifier[IndexError] ):
keyword[if] literal[string] keyword[in] identifier[gl] . identifier[data] :
identifier[_size] = identifier[gl] . identifier[data] [ literal[string] ]. identifier[iloc] [ identifier[i] ]* identifier[SIZE_FACTOR]
keyword[if] literal[string] keyword[in] identifier[gl] . identifier[data] :
identifier[_size] +=( literal[int] * identifier[gl] . identifier[data] [ literal[string] ]. identifier[iloc] [ identifier[i] ]*
identifier[SIZE_FACTOR] )
keyword[if] identifier[issubclass] ( identifier[gl] . identifier[geom] , identifier[geom_text] ):
identifier[pad] = literal[int]
keyword[if] identifier[_size] < identifier[initial_size] :
keyword[continue]
keyword[try] :
keyword[if] identifier[gl] . identifier[data] [ literal[string] ]. identifier[iloc] [ identifier[i] ] keyword[is] keyword[not] keyword[None] :
identifier[size] [ identifier[i] ]= identifier[np] . identifier[max] ([ identifier[_size] + identifier[pad] , identifier[size] [ identifier[i] ]])
keyword[except] identifier[KeyError] :
keyword[break]
keyword[return] identifier[size]
keyword[if] identifier[self] . identifier[keywidth] keyword[is] keyword[None] :
identifier[width] = identifier[determine_side_length] (
identifier[self] . identifier[_default] ( literal[string] , literal[int] ))
keyword[if] identifier[self] . identifier[direction] == literal[string] :
identifier[width] [:]= identifier[width] . identifier[max] ()
identifier[self] . identifier[_keywidth] = identifier[width]
keyword[else] :
identifier[self] . identifier[_keywidth] =[ identifier[self] . identifier[keywidth] ]* identifier[nbreak]
keyword[if] identifier[self] . identifier[keyheight] keyword[is] keyword[None] :
identifier[height] = identifier[determine_side_length] (
identifier[self] . identifier[_default] ( literal[string] , literal[int] ))
keyword[if] identifier[self] . identifier[direction] == literal[string] :
identifier[height] [:]= identifier[height] . identifier[max] ()
identifier[self] . identifier[_keyheight] = identifier[height]
keyword[else] :
identifier[self] . identifier[_keyheight] =[ identifier[self] . identifier[keyheight] ]* identifier[nbreak] | def _set_defaults(self):
guide._set_defaults(self)
nbreak = len(self.key)
# rows and columns
if self.nrow is not None and self.ncol is not None:
if guide.nrow * guide.ncol < nbreak:
raise PlotnineError('nrow x ncol need to be larger', 'than the number of breaks') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if self.nrow is None and self.ncol is None:
if self.direction == 'horizontal':
self.nrow = int(np.ceil(nbreak / 5)) # depends on [control=['if'], data=[]]
else:
self.ncol = int(np.ceil(nbreak / 20)) # depends on [control=['if'], data=[]]
self.nrow = self.nrow or int(np.ceil(nbreak / self.ncol))
self.ncol = self.ncol or int(np.ceil(nbreak / self.nrow))
# key width and key height for each legend entry
#
# Take a peak into data['size'] to make sure the
# legend dimensions are big enough
"\n >>> gg = ggplot(diamonds, aes(x='cut', y='clarity'))\n >>> gg = gg + stat_sum(aes(group='cut'))\n >>> gg + scale_size(range=(3, 25))\n\n Note the different height sizes for the entries\n "
# FIXME: This should be in the geom instead of having
# special case conditional branches
def determine_side_length(initial_size):
default_pad = initial_size * 0.5
# default_pad = 0
size = np.ones(nbreak) * initial_size
for i in range(nbreak):
for gl in self.glayers:
_size = 0
pad = default_pad
# Full size of object to appear in the
# legend key
with suppress(IndexError):
if 'size' in gl.data:
_size = gl.data['size'].iloc[i] * SIZE_FACTOR
if 'stroke' in gl.data:
_size += 2 * gl.data['stroke'].iloc[i] * SIZE_FACTOR # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# special case, color does not apply to
# border/linewidth
if issubclass(gl.geom, geom_text):
pad = 0
if _size < initial_size:
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
try:
# color(edgecolor) affects size(linewidth)
# When the edge is not visible, we should
# not expand the size of the keys
if gl.data['color'].iloc[i] is not None:
size[i] = np.max([_size + pad, size[i]]) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyError:
break # depends on [control=['except'], data=[]] # depends on [control=['with'], data=[]] # depends on [control=['for'], data=['gl']] # depends on [control=['for'], data=['i']]
return size
# keysize
if self.keywidth is None:
width = determine_side_length(self._default('legend_key_width', 18))
if self.direction == 'vertical':
width[:] = width.max() # depends on [control=['if'], data=[]]
self._keywidth = width # depends on [control=['if'], data=[]]
else:
self._keywidth = [self.keywidth] * nbreak
if self.keyheight is None:
height = determine_side_length(self._default('legend_key_height', 18))
if self.direction == 'horizontal':
height[:] = height.max() # depends on [control=['if'], data=[]]
self._keyheight = height # depends on [control=['if'], data=[]]
else:
self._keyheight = [self.keyheight] * nbreak |
def _all_datatable_data(self):
"""
Returns
-------
A list of tuples representing rows from all columns of the datatable,
sorted accordingly.
"""
dtbl = self.datatable
objs = object_session(self)
imcols = [dtbl.c.indx, dtbl.c.final, dtbl.c.override_feed000, dtbl.c.failsafe_feed999]
cols = imcols[:3] + [c for c in dtbl.c if c not in (imcols)] + [imcols[3]]
if isinstance(dtbl, Table):
return objs.query(*cols).order_by(dtbl.c.indx).all()
else:
raise Exception("Symbol has no datatable") | def function[_all_datatable_data, parameter[self]]:
constant[
Returns
-------
A list of tuples representing rows from all columns of the datatable,
sorted accordingly.
]
variable[dtbl] assign[=] name[self].datatable
variable[objs] assign[=] call[name[object_session], parameter[name[self]]]
variable[imcols] assign[=] list[[<ast.Attribute object at 0x7da2041da680>, <ast.Attribute object at 0x7da2041dab00>, <ast.Attribute object at 0x7da2041d8a60>, <ast.Attribute object at 0x7da2041dbeb0>]]
variable[cols] assign[=] binary_operation[binary_operation[call[name[imcols]][<ast.Slice object at 0x7da2041daa10>] + <ast.ListComp object at 0x7da2041d90c0>] + list[[<ast.Subscript object at 0x7da2041daaa0>]]]
if call[name[isinstance], parameter[name[dtbl], name[Table]]] begin[:]
return[call[call[call[name[objs].query, parameter[<ast.Starred object at 0x7da2041db5b0>]].order_by, parameter[name[dtbl].c.indx]].all, parameter[]]] | keyword[def] identifier[_all_datatable_data] ( identifier[self] ):
literal[string]
identifier[dtbl] = identifier[self] . identifier[datatable]
identifier[objs] = identifier[object_session] ( identifier[self] )
identifier[imcols] =[ identifier[dtbl] . identifier[c] . identifier[indx] , identifier[dtbl] . identifier[c] . identifier[final] , identifier[dtbl] . identifier[c] . identifier[override_feed000] , identifier[dtbl] . identifier[c] . identifier[failsafe_feed999] ]
identifier[cols] = identifier[imcols] [: literal[int] ]+[ identifier[c] keyword[for] identifier[c] keyword[in] identifier[dtbl] . identifier[c] keyword[if] identifier[c] keyword[not] keyword[in] ( identifier[imcols] )]+[ identifier[imcols] [ literal[int] ]]
keyword[if] identifier[isinstance] ( identifier[dtbl] , identifier[Table] ):
keyword[return] identifier[objs] . identifier[query] (* identifier[cols] ). identifier[order_by] ( identifier[dtbl] . identifier[c] . identifier[indx] ). identifier[all] ()
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] ) | def _all_datatable_data(self):
"""
Returns
-------
A list of tuples representing rows from all columns of the datatable,
sorted accordingly.
"""
dtbl = self.datatable
objs = object_session(self)
imcols = [dtbl.c.indx, dtbl.c.final, dtbl.c.override_feed000, dtbl.c.failsafe_feed999]
cols = imcols[:3] + [c for c in dtbl.c if c not in imcols] + [imcols[3]]
if isinstance(dtbl, Table):
return objs.query(*cols).order_by(dtbl.c.indx).all() # depends on [control=['if'], data=[]]
else:
raise Exception('Symbol has no datatable') |
def getValueFromValue(value):
"""
Extract the currently set field from a Value structure
"""
if type(value) != common.AttributeValue:
raise TypeError(
"Expected an AttributeValue, but got {}".format(type(value)))
if value.WhichOneof("value") is None:
raise AttributeError("Nothing set for {}".format(value))
return getattr(value, value.WhichOneof("value")) | def function[getValueFromValue, parameter[value]]:
constant[
Extract the currently set field from a Value structure
]
if compare[call[name[type], parameter[name[value]]] not_equal[!=] name[common].AttributeValue] begin[:]
<ast.Raise object at 0x7da1b287ee00>
if compare[call[name[value].WhichOneof, parameter[constant[value]]] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b287e950>
return[call[name[getattr], parameter[name[value], call[name[value].WhichOneof, parameter[constant[value]]]]]] | keyword[def] identifier[getValueFromValue] ( identifier[value] ):
literal[string]
keyword[if] identifier[type] ( identifier[value] )!= identifier[common] . identifier[AttributeValue] :
keyword[raise] identifier[TypeError] (
literal[string] . identifier[format] ( identifier[type] ( identifier[value] )))
keyword[if] identifier[value] . identifier[WhichOneof] ( literal[string] ) keyword[is] keyword[None] :
keyword[raise] identifier[AttributeError] ( literal[string] . identifier[format] ( identifier[value] ))
keyword[return] identifier[getattr] ( identifier[value] , identifier[value] . identifier[WhichOneof] ( literal[string] )) | def getValueFromValue(value):
"""
Extract the currently set field from a Value structure
"""
if type(value) != common.AttributeValue:
raise TypeError('Expected an AttributeValue, but got {}'.format(type(value))) # depends on [control=['if'], data=[]]
if value.WhichOneof('value') is None:
raise AttributeError('Nothing set for {}'.format(value)) # depends on [control=['if'], data=[]]
return getattr(value, value.WhichOneof('value')) |
def account_states(self, **kwargs):
"""
This method lets users get the status of whether or not the movie has
been rated or added to their favourite or watch lists. A valid session
id is required.
Args:
session_id: see Authentication.
Returns:
A dict representation of the JSON returned from the API.
"""
path = self._get_id_path('account_states')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response | def function[account_states, parameter[self]]:
constant[
This method lets users get the status of whether or not the movie has
been rated or added to their favourite or watch lists. A valid session
id is required.
Args:
session_id: see Authentication.
Returns:
A dict representation of the JSON returned from the API.
]
variable[path] assign[=] call[name[self]._get_id_path, parameter[constant[account_states]]]
variable[response] assign[=] call[name[self]._GET, parameter[name[path], name[kwargs]]]
call[name[self]._set_attrs_to_values, parameter[name[response]]]
return[name[response]] | keyword[def] identifier[account_states] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[path] = identifier[self] . identifier[_get_id_path] ( literal[string] )
identifier[response] = identifier[self] . identifier[_GET] ( identifier[path] , identifier[kwargs] )
identifier[self] . identifier[_set_attrs_to_values] ( identifier[response] )
keyword[return] identifier[response] | def account_states(self, **kwargs):
"""
This method lets users get the status of whether or not the movie has
been rated or added to their favourite or watch lists. A valid session
id is required.
Args:
session_id: see Authentication.
Returns:
A dict representation of the JSON returned from the API.
"""
path = self._get_id_path('account_states')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response |
def find_censored_md5ext(post_id: int) -> Optional[str]:
"Find MD5 for a censored post's ID, return None if can't find."
try:
last_pull_date = LAST_PULL_DATE_FILE.read_text().strip()
except FileNotFoundError:
last_pull_date = ""
date = datetime.utcnow()
date = f"{date.year}{date.month}{date.day}"
if last_pull_date != date:
update_batches()
LAST_PULL_DATE_FILE.parent.mkdir(exist_ok=True, parents=True)
LAST_PULL_DATE_FILE.write_text(date)
# Faster than converting every ID in files to int
post_id = str(post_id)
for batch in BATCHES_DIR.iterdir():
with open(batch, "r") as content:
for line in content:
an_id, its_md5_ext = line.split(":")
if post_id == an_id:
return its_md5_ext.rstrip().split(".")
return None | def function[find_censored_md5ext, parameter[post_id]]:
constant[Find MD5 for a censored post's ID, return None if can't find.]
<ast.Try object at 0x7da1b0aa7160>
variable[date] assign[=] call[name[datetime].utcnow, parameter[]]
variable[date] assign[=] <ast.JoinedStr object at 0x7da204963a00>
if compare[name[last_pull_date] not_equal[!=] name[date]] begin[:]
call[name[update_batches], parameter[]]
call[name[LAST_PULL_DATE_FILE].parent.mkdir, parameter[]]
call[name[LAST_PULL_DATE_FILE].write_text, parameter[name[date]]]
variable[post_id] assign[=] call[name[str], parameter[name[post_id]]]
for taget[name[batch]] in starred[call[name[BATCHES_DIR].iterdir, parameter[]]] begin[:]
with call[name[open], parameter[name[batch], constant[r]]] begin[:]
for taget[name[line]] in starred[name[content]] begin[:]
<ast.Tuple object at 0x7da1b0916020> assign[=] call[name[line].split, parameter[constant[:]]]
if compare[name[post_id] equal[==] name[an_id]] begin[:]
return[call[call[name[its_md5_ext].rstrip, parameter[]].split, parameter[constant[.]]]]
return[constant[None]] | keyword[def] identifier[find_censored_md5ext] ( identifier[post_id] : identifier[int] )-> identifier[Optional] [ identifier[str] ]:
literal[string]
keyword[try] :
identifier[last_pull_date] = identifier[LAST_PULL_DATE_FILE] . identifier[read_text] (). identifier[strip] ()
keyword[except] identifier[FileNotFoundError] :
identifier[last_pull_date] = literal[string]
identifier[date] = identifier[datetime] . identifier[utcnow] ()
identifier[date] = literal[string]
keyword[if] identifier[last_pull_date] != identifier[date] :
identifier[update_batches] ()
identifier[LAST_PULL_DATE_FILE] . identifier[parent] . identifier[mkdir] ( identifier[exist_ok] = keyword[True] , identifier[parents] = keyword[True] )
identifier[LAST_PULL_DATE_FILE] . identifier[write_text] ( identifier[date] )
identifier[post_id] = identifier[str] ( identifier[post_id] )
keyword[for] identifier[batch] keyword[in] identifier[BATCHES_DIR] . identifier[iterdir] ():
keyword[with] identifier[open] ( identifier[batch] , literal[string] ) keyword[as] identifier[content] :
keyword[for] identifier[line] keyword[in] identifier[content] :
identifier[an_id] , identifier[its_md5_ext] = identifier[line] . identifier[split] ( literal[string] )
keyword[if] identifier[post_id] == identifier[an_id] :
keyword[return] identifier[its_md5_ext] . identifier[rstrip] (). identifier[split] ( literal[string] )
keyword[return] keyword[None] | def find_censored_md5ext(post_id: int) -> Optional[str]:
"""Find MD5 for a censored post's ID, return None if can't find."""
try:
last_pull_date = LAST_PULL_DATE_FILE.read_text().strip() # depends on [control=['try'], data=[]]
except FileNotFoundError:
last_pull_date = '' # depends on [control=['except'], data=[]]
date = datetime.utcnow()
date = f'{date.year}{date.month}{date.day}'
if last_pull_date != date:
update_batches()
LAST_PULL_DATE_FILE.parent.mkdir(exist_ok=True, parents=True)
LAST_PULL_DATE_FILE.write_text(date) # depends on [control=['if'], data=['date']]
# Faster than converting every ID in files to int
post_id = str(post_id)
for batch in BATCHES_DIR.iterdir():
with open(batch, 'r') as content:
for line in content:
(an_id, its_md5_ext) = line.split(':')
if post_id == an_id:
return its_md5_ext.rstrip().split('.') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['content']] # depends on [control=['for'], data=['batch']]
return None |
def get_parent_mode_constraints(self):
"""Return the category and subcategory keys to be set in the
subordinate mode.
:returns: (the category definition, the hazard/exposure definition)
:rtype: (dict, dict)
"""
h, e, _hc, _ec = self.selected_impact_function_constraints()
if self.parent_step in [self.step_fc_hazlayer_from_canvas,
self.step_fc_hazlayer_from_browser]:
category = layer_purpose_hazard
subcategory = h
elif self.parent_step in [self.step_fc_explayer_from_canvas,
self.step_fc_explayer_from_browser]:
category = layer_purpose_exposure
subcategory = e
elif self.parent_step:
category = layer_purpose_aggregation
subcategory = None
else:
category = None
subcategory = None
return category, subcategory | def function[get_parent_mode_constraints, parameter[self]]:
constant[Return the category and subcategory keys to be set in the
subordinate mode.
:returns: (the category definition, the hazard/exposure definition)
:rtype: (dict, dict)
]
<ast.Tuple object at 0x7da1b2346140> assign[=] call[name[self].selected_impact_function_constraints, parameter[]]
if compare[name[self].parent_step in list[[<ast.Attribute object at 0x7da1b2344fd0>, <ast.Attribute object at 0x7da1b23448e0>]]] begin[:]
variable[category] assign[=] name[layer_purpose_hazard]
variable[subcategory] assign[=] name[h]
return[tuple[[<ast.Name object at 0x7da18f721b10>, <ast.Name object at 0x7da18f723280>]]] | keyword[def] identifier[get_parent_mode_constraints] ( identifier[self] ):
literal[string]
identifier[h] , identifier[e] , identifier[_hc] , identifier[_ec] = identifier[self] . identifier[selected_impact_function_constraints] ()
keyword[if] identifier[self] . identifier[parent_step] keyword[in] [ identifier[self] . identifier[step_fc_hazlayer_from_canvas] ,
identifier[self] . identifier[step_fc_hazlayer_from_browser] ]:
identifier[category] = identifier[layer_purpose_hazard]
identifier[subcategory] = identifier[h]
keyword[elif] identifier[self] . identifier[parent_step] keyword[in] [ identifier[self] . identifier[step_fc_explayer_from_canvas] ,
identifier[self] . identifier[step_fc_explayer_from_browser] ]:
identifier[category] = identifier[layer_purpose_exposure]
identifier[subcategory] = identifier[e]
keyword[elif] identifier[self] . identifier[parent_step] :
identifier[category] = identifier[layer_purpose_aggregation]
identifier[subcategory] = keyword[None]
keyword[else] :
identifier[category] = keyword[None]
identifier[subcategory] = keyword[None]
keyword[return] identifier[category] , identifier[subcategory] | def get_parent_mode_constraints(self):
"""Return the category and subcategory keys to be set in the
subordinate mode.
:returns: (the category definition, the hazard/exposure definition)
:rtype: (dict, dict)
"""
(h, e, _hc, _ec) = self.selected_impact_function_constraints()
if self.parent_step in [self.step_fc_hazlayer_from_canvas, self.step_fc_hazlayer_from_browser]:
category = layer_purpose_hazard
subcategory = h # depends on [control=['if'], data=[]]
elif self.parent_step in [self.step_fc_explayer_from_canvas, self.step_fc_explayer_from_browser]:
category = layer_purpose_exposure
subcategory = e # depends on [control=['if'], data=[]]
elif self.parent_step:
category = layer_purpose_aggregation
subcategory = None # depends on [control=['if'], data=[]]
else:
category = None
subcategory = None
return (category, subcategory) |
def _create_default_config_file(self):
"""
If config file does not exists create and set default values.
"""
logger.info('Initialize Maya launcher, creating config file...\n')
self.add_section(self.DEFAULTS)
self.add_section(self.PATTERNS)
self.add_section(self.ENVIRONMENTS)
self.add_section(self.EXECUTABLES)
self.set(self.DEFAULTS, 'executable', None)
self.set(self.DEFAULTS, 'environment', None)
self.set(self.PATTERNS, 'exclude', ', '.join(self.EXLUDE_PATTERNS))
self.set(self.PATTERNS, 'icon_ext', ', '.join(self.ICON_EXTENSIONS))
self.config_file.parent.mkdir(exist_ok=True)
self.config_file.touch()
with self.config_file.open('wb') as f:
self.write(f)
# If this function is run inform the user that a new file has been
# created.
sys.exit('Maya launcher has successfully created config file at:\n'
' "{}"'.format(str(self.config_file))) | def function[_create_default_config_file, parameter[self]]:
constant[
If config file does not exists create and set default values.
]
call[name[logger].info, parameter[constant[Initialize Maya launcher, creating config file...
]]]
call[name[self].add_section, parameter[name[self].DEFAULTS]]
call[name[self].add_section, parameter[name[self].PATTERNS]]
call[name[self].add_section, parameter[name[self].ENVIRONMENTS]]
call[name[self].add_section, parameter[name[self].EXECUTABLES]]
call[name[self].set, parameter[name[self].DEFAULTS, constant[executable], constant[None]]]
call[name[self].set, parameter[name[self].DEFAULTS, constant[environment], constant[None]]]
call[name[self].set, parameter[name[self].PATTERNS, constant[exclude], call[constant[, ].join, parameter[name[self].EXLUDE_PATTERNS]]]]
call[name[self].set, parameter[name[self].PATTERNS, constant[icon_ext], call[constant[, ].join, parameter[name[self].ICON_EXTENSIONS]]]]
call[name[self].config_file.parent.mkdir, parameter[]]
call[name[self].config_file.touch, parameter[]]
with call[name[self].config_file.open, parameter[constant[wb]]] begin[:]
call[name[self].write, parameter[name[f]]]
call[name[sys].exit, parameter[call[constant[Maya launcher has successfully created config file at:
"{}"].format, parameter[call[name[str], parameter[name[self].config_file]]]]]] | keyword[def] identifier[_create_default_config_file] ( identifier[self] ):
literal[string]
identifier[logger] . identifier[info] ( literal[string] )
identifier[self] . identifier[add_section] ( identifier[self] . identifier[DEFAULTS] )
identifier[self] . identifier[add_section] ( identifier[self] . identifier[PATTERNS] )
identifier[self] . identifier[add_section] ( identifier[self] . identifier[ENVIRONMENTS] )
identifier[self] . identifier[add_section] ( identifier[self] . identifier[EXECUTABLES] )
identifier[self] . identifier[set] ( identifier[self] . identifier[DEFAULTS] , literal[string] , keyword[None] )
identifier[self] . identifier[set] ( identifier[self] . identifier[DEFAULTS] , literal[string] , keyword[None] )
identifier[self] . identifier[set] ( identifier[self] . identifier[PATTERNS] , literal[string] , literal[string] . identifier[join] ( identifier[self] . identifier[EXLUDE_PATTERNS] ))
identifier[self] . identifier[set] ( identifier[self] . identifier[PATTERNS] , literal[string] , literal[string] . identifier[join] ( identifier[self] . identifier[ICON_EXTENSIONS] ))
identifier[self] . identifier[config_file] . identifier[parent] . identifier[mkdir] ( identifier[exist_ok] = keyword[True] )
identifier[self] . identifier[config_file] . identifier[touch] ()
keyword[with] identifier[self] . identifier[config_file] . identifier[open] ( literal[string] ) keyword[as] identifier[f] :
identifier[self] . identifier[write] ( identifier[f] )
identifier[sys] . identifier[exit] ( literal[string]
literal[string] . identifier[format] ( identifier[str] ( identifier[self] . identifier[config_file] ))) | def _create_default_config_file(self):
"""
If config file does not exists create and set default values.
"""
logger.info('Initialize Maya launcher, creating config file...\n')
self.add_section(self.DEFAULTS)
self.add_section(self.PATTERNS)
self.add_section(self.ENVIRONMENTS)
self.add_section(self.EXECUTABLES)
self.set(self.DEFAULTS, 'executable', None)
self.set(self.DEFAULTS, 'environment', None)
self.set(self.PATTERNS, 'exclude', ', '.join(self.EXLUDE_PATTERNS))
self.set(self.PATTERNS, 'icon_ext', ', '.join(self.ICON_EXTENSIONS))
self.config_file.parent.mkdir(exist_ok=True)
self.config_file.touch()
with self.config_file.open('wb') as f:
self.write(f) # depends on [control=['with'], data=['f']] # If this function is run inform the user that a new file has been
# created.
sys.exit('Maya launcher has successfully created config file at:\n "{}"'.format(str(self.config_file))) |
def activate_in_ec(self, ec_index):
'''Activate this component in an execution context.
@param ec_index The index of the execution context to activate in.
This index is into the total array of contexts, that
is both owned and participating contexts. If the value
of ec_index is greater than the length of
@ref owned_ecs, that length is subtracted from
ec_index and the result used as an index into
@ref participating_ecs.
'''
with self._mutex:
if ec_index >= len(self.owned_ecs):
ec_index -= len(self.owned_ecs)
if ec_index >= len(self.participating_ecs):
raise exceptions.BadECIndexError(ec_index)
ec = self.participating_ecs[ec_index]
else:
ec = self.owned_ecs[ec_index]
ec.activate_component(self._obj) | def function[activate_in_ec, parameter[self, ec_index]]:
constant[Activate this component in an execution context.
@param ec_index The index of the execution context to activate in.
This index is into the total array of contexts, that
is both owned and participating contexts. If the value
of ec_index is greater than the length of
@ref owned_ecs, that length is subtracted from
ec_index and the result used as an index into
@ref participating_ecs.
]
with name[self]._mutex begin[:]
if compare[name[ec_index] greater_or_equal[>=] call[name[len], parameter[name[self].owned_ecs]]] begin[:]
<ast.AugAssign object at 0x7da207f03c70>
if compare[name[ec_index] greater_or_equal[>=] call[name[len], parameter[name[self].participating_ecs]]] begin[:]
<ast.Raise object at 0x7da18ede6e00>
variable[ec] assign[=] call[name[self].participating_ecs][name[ec_index]]
call[name[ec].activate_component, parameter[name[self]._obj]] | keyword[def] identifier[activate_in_ec] ( identifier[self] , identifier[ec_index] ):
literal[string]
keyword[with] identifier[self] . identifier[_mutex] :
keyword[if] identifier[ec_index] >= identifier[len] ( identifier[self] . identifier[owned_ecs] ):
identifier[ec_index] -= identifier[len] ( identifier[self] . identifier[owned_ecs] )
keyword[if] identifier[ec_index] >= identifier[len] ( identifier[self] . identifier[participating_ecs] ):
keyword[raise] identifier[exceptions] . identifier[BadECIndexError] ( identifier[ec_index] )
identifier[ec] = identifier[self] . identifier[participating_ecs] [ identifier[ec_index] ]
keyword[else] :
identifier[ec] = identifier[self] . identifier[owned_ecs] [ identifier[ec_index] ]
identifier[ec] . identifier[activate_component] ( identifier[self] . identifier[_obj] ) | def activate_in_ec(self, ec_index):
"""Activate this component in an execution context.
@param ec_index The index of the execution context to activate in.
This index is into the total array of contexts, that
is both owned and participating contexts. If the value
of ec_index is greater than the length of
@ref owned_ecs, that length is subtracted from
ec_index and the result used as an index into
@ref participating_ecs.
"""
with self._mutex:
if ec_index >= len(self.owned_ecs):
ec_index -= len(self.owned_ecs)
if ec_index >= len(self.participating_ecs):
raise exceptions.BadECIndexError(ec_index) # depends on [control=['if'], data=['ec_index']]
ec = self.participating_ecs[ec_index] # depends on [control=['if'], data=['ec_index']]
else:
ec = self.owned_ecs[ec_index]
ec.activate_component(self._obj) # depends on [control=['with'], data=[]] |
def getAuthenticatedUser(self, headers=None, query_params=None, content_type="application/json"):
"""
Get currently authenticated user (if any).
It is method for GET /self
"""
uri = self.client.base_url + "/self"
return self.client.get(uri, None, headers, query_params, content_type) | def function[getAuthenticatedUser, parameter[self, headers, query_params, content_type]]:
constant[
Get currently authenticated user (if any).
It is method for GET /self
]
variable[uri] assign[=] binary_operation[name[self].client.base_url + constant[/self]]
return[call[name[self].client.get, parameter[name[uri], constant[None], name[headers], name[query_params], name[content_type]]]] | keyword[def] identifier[getAuthenticatedUser] ( identifier[self] , identifier[headers] = keyword[None] , identifier[query_params] = keyword[None] , identifier[content_type] = literal[string] ):
literal[string]
identifier[uri] = identifier[self] . identifier[client] . identifier[base_url] + literal[string]
keyword[return] identifier[self] . identifier[client] . identifier[get] ( identifier[uri] , keyword[None] , identifier[headers] , identifier[query_params] , identifier[content_type] ) | def getAuthenticatedUser(self, headers=None, query_params=None, content_type='application/json'):
"""
Get currently authenticated user (if any).
It is method for GET /self
"""
uri = self.client.base_url + '/self'
return self.client.get(uri, None, headers, query_params, content_type) |
def validate(cls, mapper_spec):
"""Inherit docs."""
writer_spec = cls.get_params(mapper_spec, allow_old=False)
if writer_spec.get(cls._NO_DUPLICATE, False) not in (True, False):
raise errors.BadWriterParamsError("No duplicate must a boolean.")
super(_GoogleCloudStorageOutputWriter, cls).validate(mapper_spec) | def function[validate, parameter[cls, mapper_spec]]:
constant[Inherit docs.]
variable[writer_spec] assign[=] call[name[cls].get_params, parameter[name[mapper_spec]]]
if compare[call[name[writer_spec].get, parameter[name[cls]._NO_DUPLICATE, constant[False]]] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da20c6c7340>, <ast.Constant object at 0x7da20c6c59c0>]]] begin[:]
<ast.Raise object at 0x7da20c6c47f0>
call[call[name[super], parameter[name[_GoogleCloudStorageOutputWriter], name[cls]]].validate, parameter[name[mapper_spec]]] | keyword[def] identifier[validate] ( identifier[cls] , identifier[mapper_spec] ):
literal[string]
identifier[writer_spec] = identifier[cls] . identifier[get_params] ( identifier[mapper_spec] , identifier[allow_old] = keyword[False] )
keyword[if] identifier[writer_spec] . identifier[get] ( identifier[cls] . identifier[_NO_DUPLICATE] , keyword[False] ) keyword[not] keyword[in] ( keyword[True] , keyword[False] ):
keyword[raise] identifier[errors] . identifier[BadWriterParamsError] ( literal[string] )
identifier[super] ( identifier[_GoogleCloudStorageOutputWriter] , identifier[cls] ). identifier[validate] ( identifier[mapper_spec] ) | def validate(cls, mapper_spec):
"""Inherit docs."""
writer_spec = cls.get_params(mapper_spec, allow_old=False)
if writer_spec.get(cls._NO_DUPLICATE, False) not in (True, False):
raise errors.BadWriterParamsError('No duplicate must a boolean.') # depends on [control=['if'], data=[]]
super(_GoogleCloudStorageOutputWriter, cls).validate(mapper_spec) |
def get_value_format(value_format: Union[Callable, str] = str) -> Callable[[float], str]:
"""Create a formatting function from a generic value_format argument.
"""
if value_format is None:
value_format = ""
if isinstance(value_format, str):
format_str = "{0:" + value_format + "}"
def value_format(x): return format_str.format(x)
return value_format | def function[get_value_format, parameter[value_format]]:
constant[Create a formatting function from a generic value_format argument.
]
if compare[name[value_format] is constant[None]] begin[:]
variable[value_format] assign[=] constant[]
if call[name[isinstance], parameter[name[value_format], name[str]]] begin[:]
variable[format_str] assign[=] binary_operation[binary_operation[constant[{0:] + name[value_format]] + constant[}]]
def function[value_format, parameter[x]]:
return[call[name[format_str].format, parameter[name[x]]]]
return[name[value_format]] | keyword[def] identifier[get_value_format] ( identifier[value_format] : identifier[Union] [ identifier[Callable] , identifier[str] ]= identifier[str] )-> identifier[Callable] [[ identifier[float] ], identifier[str] ]:
literal[string]
keyword[if] identifier[value_format] keyword[is] keyword[None] :
identifier[value_format] = literal[string]
keyword[if] identifier[isinstance] ( identifier[value_format] , identifier[str] ):
identifier[format_str] = literal[string] + identifier[value_format] + literal[string]
keyword[def] identifier[value_format] ( identifier[x] ): keyword[return] identifier[format_str] . identifier[format] ( identifier[x] )
keyword[return] identifier[value_format] | def get_value_format(value_format: Union[Callable, str]=str) -> Callable[[float], str]:
"""Create a formatting function from a generic value_format argument.
"""
if value_format is None:
value_format = '' # depends on [control=['if'], data=['value_format']]
if isinstance(value_format, str):
format_str = '{0:' + value_format + '}'
def value_format(x):
return format_str.format(x) # depends on [control=['if'], data=[]]
return value_format |
def _get_variation_id(value, capital=False):
""" Convert an integer value to a character. a-z then double aa-zz etc
Args:
value (int): integer index we're looking up
capital (bool): whether we convert to capitals or not
Returns (str): alphanumeric representation of the index
"""
# Reinforcing type just in case a valid string was entered
value = int(value)
base_power = base_start = base_end = 0
while value >= base_end:
base_power += 1
base_start = base_end
base_end += pow(26, base_power)
base_index = value - base_start
# create alpha representation
alphas = ['a'] * base_power
for index in range(base_power - 1, -1, -1):
alphas[index] = chr(int(97 + (base_index % 26)))
base_index /= 26
characters = ''.join(alphas)
return characters.upper() if capital else characters | def function[_get_variation_id, parameter[value, capital]]:
constant[ Convert an integer value to a character. a-z then double aa-zz etc
Args:
value (int): integer index we're looking up
capital (bool): whether we convert to capitals or not
Returns (str): alphanumeric representation of the index
]
variable[value] assign[=] call[name[int], parameter[name[value]]]
variable[base_power] assign[=] constant[0]
while compare[name[value] greater_or_equal[>=] name[base_end]] begin[:]
<ast.AugAssign object at 0x7da2054a5d50>
variable[base_start] assign[=] name[base_end]
<ast.AugAssign object at 0x7da2054a76a0>
variable[base_index] assign[=] binary_operation[name[value] - name[base_start]]
variable[alphas] assign[=] binary_operation[list[[<ast.Constant object at 0x7da2054a4280>]] * name[base_power]]
for taget[name[index]] in starred[call[name[range], parameter[binary_operation[name[base_power] - constant[1]], <ast.UnaryOp object at 0x7da2054a5f00>, <ast.UnaryOp object at 0x7da2054a7d60>]]] begin[:]
call[name[alphas]][name[index]] assign[=] call[name[chr], parameter[call[name[int], parameter[binary_operation[constant[97] + binary_operation[name[base_index] <ast.Mod object at 0x7da2590d6920> constant[26]]]]]]]
<ast.AugAssign object at 0x7da20c76e350>
variable[characters] assign[=] call[constant[].join, parameter[name[alphas]]]
return[<ast.IfExp object at 0x7da20c76dea0>] | keyword[def] identifier[_get_variation_id] ( identifier[value] , identifier[capital] = keyword[False] ):
literal[string]
identifier[value] = identifier[int] ( identifier[value] )
identifier[base_power] = identifier[base_start] = identifier[base_end] = literal[int]
keyword[while] identifier[value] >= identifier[base_end] :
identifier[base_power] += literal[int]
identifier[base_start] = identifier[base_end]
identifier[base_end] += identifier[pow] ( literal[int] , identifier[base_power] )
identifier[base_index] = identifier[value] - identifier[base_start]
identifier[alphas] =[ literal[string] ]* identifier[base_power]
keyword[for] identifier[index] keyword[in] identifier[range] ( identifier[base_power] - literal[int] ,- literal[int] ,- literal[int] ):
identifier[alphas] [ identifier[index] ]= identifier[chr] ( identifier[int] ( literal[int] +( identifier[base_index] % literal[int] )))
identifier[base_index] /= literal[int]
identifier[characters] = literal[string] . identifier[join] ( identifier[alphas] )
keyword[return] identifier[characters] . identifier[upper] () keyword[if] identifier[capital] keyword[else] identifier[characters] | def _get_variation_id(value, capital=False):
""" Convert an integer value to a character. a-z then double aa-zz etc
Args:
value (int): integer index we're looking up
capital (bool): whether we convert to capitals or not
Returns (str): alphanumeric representation of the index
"""
# Reinforcing type just in case a valid string was entered
value = int(value)
base_power = base_start = base_end = 0
while value >= base_end:
base_power += 1
base_start = base_end
base_end += pow(26, base_power) # depends on [control=['while'], data=['base_end']]
base_index = value - base_start
# create alpha representation
alphas = ['a'] * base_power
for index in range(base_power - 1, -1, -1):
alphas[index] = chr(int(97 + base_index % 26))
base_index /= 26 # depends on [control=['for'], data=['index']]
characters = ''.join(alphas)
return characters.upper() if capital else characters |
def _write_file(iface, data, folder, pattern):
'''
Writes a file to disk
'''
filename = os.path.join(folder, pattern.format(iface))
if not os.path.exists(folder):
msg = '{0} cannot be written. {1} does not exist'
msg = msg.format(filename, folder)
log.error(msg)
raise AttributeError(msg)
with salt.utils.files.flopen(filename, 'w') as fout:
fout.write(salt.utils.stringutils.to_str(data))
return filename | def function[_write_file, parameter[iface, data, folder, pattern]]:
constant[
Writes a file to disk
]
variable[filename] assign[=] call[name[os].path.join, parameter[name[folder], call[name[pattern].format, parameter[name[iface]]]]]
if <ast.UnaryOp object at 0x7da1b1f9b9a0> begin[:]
variable[msg] assign[=] constant[{0} cannot be written. {1} does not exist]
variable[msg] assign[=] call[name[msg].format, parameter[name[filename], name[folder]]]
call[name[log].error, parameter[name[msg]]]
<ast.Raise object at 0x7da1b1f9ad40>
with call[name[salt].utils.files.flopen, parameter[name[filename], constant[w]]] begin[:]
call[name[fout].write, parameter[call[name[salt].utils.stringutils.to_str, parameter[name[data]]]]]
return[name[filename]] | keyword[def] identifier[_write_file] ( identifier[iface] , identifier[data] , identifier[folder] , identifier[pattern] ):
literal[string]
identifier[filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[folder] , identifier[pattern] . identifier[format] ( identifier[iface] ))
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[folder] ):
identifier[msg] = literal[string]
identifier[msg] = identifier[msg] . identifier[format] ( identifier[filename] , identifier[folder] )
identifier[log] . identifier[error] ( identifier[msg] )
keyword[raise] identifier[AttributeError] ( identifier[msg] )
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[flopen] ( identifier[filename] , literal[string] ) keyword[as] identifier[fout] :
identifier[fout] . identifier[write] ( identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str] ( identifier[data] ))
keyword[return] identifier[filename] | def _write_file(iface, data, folder, pattern):
"""
Writes a file to disk
"""
filename = os.path.join(folder, pattern.format(iface))
if not os.path.exists(folder):
msg = '{0} cannot be written. {1} does not exist'
msg = msg.format(filename, folder)
log.error(msg)
raise AttributeError(msg) # depends on [control=['if'], data=[]]
with salt.utils.files.flopen(filename, 'w') as fout:
fout.write(salt.utils.stringutils.to_str(data)) # depends on [control=['with'], data=['fout']]
return filename |
def first(sequence, message=None):
"""The first item in that sequence
If there aren't any, raise a ValueError with that message
"""
try:
return next(iter(sequence))
except StopIteration:
raise ValueError(message or ('Sequence is empty: %s' % sequence)) | def function[first, parameter[sequence, message]]:
constant[The first item in that sequence
If there aren't any, raise a ValueError with that message
]
<ast.Try object at 0x7da2044c3460> | keyword[def] identifier[first] ( identifier[sequence] , identifier[message] = keyword[None] ):
literal[string]
keyword[try] :
keyword[return] identifier[next] ( identifier[iter] ( identifier[sequence] ))
keyword[except] identifier[StopIteration] :
keyword[raise] identifier[ValueError] ( identifier[message] keyword[or] ( literal[string] % identifier[sequence] )) | def first(sequence, message=None):
"""The first item in that sequence
If there aren't any, raise a ValueError with that message
"""
try:
return next(iter(sequence)) # depends on [control=['try'], data=[]]
except StopIteration:
raise ValueError(message or 'Sequence is empty: %s' % sequence) # depends on [control=['except'], data=[]] |
def get_plaintext_to_sign(self):
"""
Get back the plaintext that will be signed.
It is derived from the serialized zone file strings,
but encoded as a single string (omitting the signature field,
if already given)
"""
as_strings = self.pack_subdomain()
if self.sig is not None:
# don't sign the signature
as_strings = as_strings[:-1]
return ",".join(as_strings) | def function[get_plaintext_to_sign, parameter[self]]:
constant[
Get back the plaintext that will be signed.
It is derived from the serialized zone file strings,
but encoded as a single string (omitting the signature field,
if already given)
]
variable[as_strings] assign[=] call[name[self].pack_subdomain, parameter[]]
if compare[name[self].sig is_not constant[None]] begin[:]
variable[as_strings] assign[=] call[name[as_strings]][<ast.Slice object at 0x7da20e963d90>]
return[call[constant[,].join, parameter[name[as_strings]]]] | keyword[def] identifier[get_plaintext_to_sign] ( identifier[self] ):
literal[string]
identifier[as_strings] = identifier[self] . identifier[pack_subdomain] ()
keyword[if] identifier[self] . identifier[sig] keyword[is] keyword[not] keyword[None] :
identifier[as_strings] = identifier[as_strings] [:- literal[int] ]
keyword[return] literal[string] . identifier[join] ( identifier[as_strings] ) | def get_plaintext_to_sign(self):
"""
Get back the plaintext that will be signed.
It is derived from the serialized zone file strings,
but encoded as a single string (omitting the signature field,
if already given)
"""
as_strings = self.pack_subdomain()
if self.sig is not None:
# don't sign the signature
as_strings = as_strings[:-1] # depends on [control=['if'], data=[]]
return ','.join(as_strings) |
def bsp_contains(node: tcod.bsp.BSP, cx: int, cy: int) -> bool:
"""
.. deprecated:: 2.0
Use :any:`BSP.contains` instead.
"""
return node.contains(cx, cy) | def function[bsp_contains, parameter[node, cx, cy]]:
constant[
.. deprecated:: 2.0
Use :any:`BSP.contains` instead.
]
return[call[name[node].contains, parameter[name[cx], name[cy]]]] | keyword[def] identifier[bsp_contains] ( identifier[node] : identifier[tcod] . identifier[bsp] . identifier[BSP] , identifier[cx] : identifier[int] , identifier[cy] : identifier[int] )-> identifier[bool] :
literal[string]
keyword[return] identifier[node] . identifier[contains] ( identifier[cx] , identifier[cy] ) | def bsp_contains(node: tcod.bsp.BSP, cx: int, cy: int) -> bool:
"""
.. deprecated:: 2.0
Use :any:`BSP.contains` instead.
"""
return node.contains(cx, cy) |
def withFile(file, func, mode='r', expand=False):
"""Pass `file` to `func` and ensure the file is closed afterwards. If
`file` is a string, open according to `mode`; if `expand` is true also
expand user and vars.
"""
file = _normalizeToFile(file, mode=mode, expand=expand)
try: return func(file)
finally: file.close() | def function[withFile, parameter[file, func, mode, expand]]:
constant[Pass `file` to `func` and ensure the file is closed afterwards. If
`file` is a string, open according to `mode`; if `expand` is true also
expand user and vars.
]
variable[file] assign[=] call[name[_normalizeToFile], parameter[name[file]]]
<ast.Try object at 0x7da1b10244c0> | keyword[def] identifier[withFile] ( identifier[file] , identifier[func] , identifier[mode] = literal[string] , identifier[expand] = keyword[False] ):
literal[string]
identifier[file] = identifier[_normalizeToFile] ( identifier[file] , identifier[mode] = identifier[mode] , identifier[expand] = identifier[expand] )
keyword[try] : keyword[return] identifier[func] ( identifier[file] )
keyword[finally] : identifier[file] . identifier[close] () | def withFile(file, func, mode='r', expand=False):
"""Pass `file` to `func` and ensure the file is closed afterwards. If
`file` is a string, open according to `mode`; if `expand` is true also
expand user and vars.
"""
file = _normalizeToFile(file, mode=mode, expand=expand)
try:
return func(file) # depends on [control=['try'], data=[]]
finally:
file.close() |
def handoverCommand(SynchronizationIndication_presence=0,
FrequencyShortList_presence=0, FrequencyList_presence=0,
CellChannelDescription_presence=0,
MultislotAllocation_presence=0,
ChannelMode_presence=0, ChannelMode_presence1=0,
ChannelMode_presence2=0,
ChannelMode_presence3=0, ChannelMode_presence4=0,
ChannelMode_presence5=0,
ChannelMode_presence6=0, ChannelMode_presence7=0,
ChannelDescription_presence1=0, ChannelMode2_presence=0,
FrequencyChannelSequence_presence=0,
MobileAllocation_presence=0,
StartingTime_presence=0, TimeDifference_presence=0,
TimingAdvance_presence=0,
FrequencyShortList_presence1=0,
FrequencyList_presence1=0,
ChannelDescription2_presence=0,
ChannelDescription_presence2=0,
FrequencyChannelSequence_presence1=0,
MobileAllocation_presence1=0,
CipherModeSetting_presence=0,
VgcsTargetModeIdentication_presence=0,
MultiRateConfiguration_presence=0):
"""HANDOVER COMMAND Section 9.1.15"""
name = "Handover Command"
a = TpPd(pd=0x6)
b = MessageType(mesType=0x2b) # 00101011
c = CellDescription()
d = ChannelDescription2()
e = HandoverReference()
f = PowerCommandAndAccessType()
packet = a / b / c / d / e / f
if SynchronizationIndication_presence is 1:
g = SynchronizationIndicationHdr(ieiSI=0xD, eightBitSI=0x0)
packet = packet / g
if FrequencyShortList_presence is 1:
h = FrequencyShortListHdr(ieiFSL=0x02)
packet = packet / h
if FrequencyList_presence is 1:
i = FrequencyListHdr(ieiFL=0x05, eightBitFL=0x0)
packet = packet / i
if CellChannelDescription_presence is 1:
j = CellChannelDescriptionHdr(ieiCCD=0x62, eightBitCCD=0x0)
packet = packet / j
if MultislotAllocation_presence is 1:
k = MultislotAllocationHdr(ieiMSA=0x10, eightBitMSA=0x0)
packet = packet / k
if ChannelMode_presence is 1:
l = ChannelModeHdr(ieiCM=0x63, eightBitCM=0x0)
packet = packet / l
if ChannelMode_presence1 is 1:
m = ChannelModeHdr(ieiCM=0x11, eightBitCM=0x0)
packet = packet / m
if ChannelMode_presence2 is 1:
n = ChannelModeHdr(ieiCM=0x13, eightBitCM=0x0)
packet = packet / n
if ChannelMode_presence3 is 1:
o = ChannelModeHdr(ieiCM=0x14, eightBitCM=0x0)
packet = packet / o
if ChannelMode_presence4 is 1:
p = ChannelModeHdr(ieiCM=0x15, eightBitCM=0x0)
packet = packet / p
if ChannelMode_presence5 is 1:
q = ChannelModeHdr(ieiCM=0x16, eightBitCM=0x0)
packet = packet / q
if ChannelMode_presence6 is 1:
r = ChannelModeHdr(ieiCM=0x17, eightBitCM=0x0)
packet = packet / r
if ChannelMode_presence7 is 1:
s = ChannelModeHdr(ieiCM=0x18, eightBitCM=0x0)
packet = packet / s
if ChannelDescription_presence1 is 1:
s1 = ChannelDescriptionHdr(ieiCD=0x64, eightBitCD=0x0)
packet = packet / s1
if ChannelMode2_presence is 1:
t = ChannelMode2Hdr(ieiCM2=0x66, eightBitCM2=0x0)
packet = packet / t
if FrequencyChannelSequence_presence is 1:
u = FrequencyChannelSequenceHdr(ieiFCS=0x69, eightBitFCS=0x0)
packet = packet / u
if MobileAllocation_presence is 1:
v = MobileAllocationHdr(ieiMA=0x72, eightBitMA=0x0)
packet = packet / v
if StartingTime_presence is 1:
w = StartingTimeHdr(ieiST=0x7C, eightBitST=0x0)
packet = packet / w
if TimeDifference_presence is 1:
x = TimeDifferenceHdr(ieiTD=0x7B, eightBitTD=0x0)
packet = packet / x
if TimingAdvance_presence is 1:
y = TimingAdvanceHdr(ieiTA=0x7D, eightBitTA=0x0)
packet = packet / y
if FrequencyShortList_presence1 is 1:
z = FrequencyShortListHdr(ieiFSL=0x12)
packet = packet / z
if FrequencyList_presence1 is 1:
aa = FrequencyListHdr(ieiFL=0x19, eightBitFL=0x0)
packet = packet / aa
if ChannelDescription2_presence is 1:
ab = ChannelDescription2Hdr(ieiCD2=0x1C, eightBitCD2=0x0)
packet = packet / ab
if ChannelDescription_presence2 is 1:
ac = ChannelDescriptionHdr(ieiCD=0x1D, eightBitCD=0x0)
packet = packet / ac
if FrequencyChannelSequence_presence1 is 1:
ad = FrequencyChannelSequenceHdr(ieiFCS=0x1E, eightBitFCS=0x0)
packet = packet / ad
if MobileAllocation_presence1 is 1:
ae = MobileAllocationHdr(ieiMA=0x21, eightBitMA=0x0)
packet = packet / ae
if CipherModeSetting_presence is 1:
af = CipherModeSettingHdr(ieiCMS=0x9, eightBitCMS=0x0)
packet = packet / af
if VgcsTargetModeIdentication_presence is 1:
ag = VgcsTargetModeIdenticationHdr(ieiVTMI=0x01, eightBitVTMI=0x0)
packet = packet / ag
if MultiRateConfiguration_presence is 1:
ah = MultiRateConfigurationHdr(ieiMRC=0x03, eightBitMRC=0x0)
packet = packet / ah
return packet | def function[handoverCommand, parameter[SynchronizationIndication_presence, FrequencyShortList_presence, FrequencyList_presence, CellChannelDescription_presence, MultislotAllocation_presence, ChannelMode_presence, ChannelMode_presence1, ChannelMode_presence2, ChannelMode_presence3, ChannelMode_presence4, ChannelMode_presence5, ChannelMode_presence6, ChannelMode_presence7, ChannelDescription_presence1, ChannelMode2_presence, FrequencyChannelSequence_presence, MobileAllocation_presence, StartingTime_presence, TimeDifference_presence, TimingAdvance_presence, FrequencyShortList_presence1, FrequencyList_presence1, ChannelDescription2_presence, ChannelDescription_presence2, FrequencyChannelSequence_presence1, MobileAllocation_presence1, CipherModeSetting_presence, VgcsTargetModeIdentication_presence, MultiRateConfiguration_presence]]:
constant[HANDOVER COMMAND Section 9.1.15]
variable[name] assign[=] constant[Handover Command]
variable[a] assign[=] call[name[TpPd], parameter[]]
variable[b] assign[=] call[name[MessageType], parameter[]]
variable[c] assign[=] call[name[CellDescription], parameter[]]
variable[d] assign[=] call[name[ChannelDescription2], parameter[]]
variable[e] assign[=] call[name[HandoverReference], parameter[]]
variable[f] assign[=] call[name[PowerCommandAndAccessType], parameter[]]
variable[packet] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[a] / name[b]] / name[c]] / name[d]] / name[e]] / name[f]]
if compare[name[SynchronizationIndication_presence] is constant[1]] begin[:]
variable[g] assign[=] call[name[SynchronizationIndicationHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[g]]
if compare[name[FrequencyShortList_presence] is constant[1]] begin[:]
variable[h] assign[=] call[name[FrequencyShortListHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[h]]
if compare[name[FrequencyList_presence] is constant[1]] begin[:]
variable[i] assign[=] call[name[FrequencyListHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[i]]
if compare[name[CellChannelDescription_presence] is constant[1]] begin[:]
variable[j] assign[=] call[name[CellChannelDescriptionHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[j]]
if compare[name[MultislotAllocation_presence] is constant[1]] begin[:]
variable[k] assign[=] call[name[MultislotAllocationHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[k]]
if compare[name[ChannelMode_presence] is constant[1]] begin[:]
variable[l] assign[=] call[name[ChannelModeHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[l]]
if compare[name[ChannelMode_presence1] is constant[1]] begin[:]
variable[m] assign[=] call[name[ChannelModeHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[m]]
if compare[name[ChannelMode_presence2] is constant[1]] begin[:]
variable[n] assign[=] call[name[ChannelModeHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[n]]
if compare[name[ChannelMode_presence3] is constant[1]] begin[:]
variable[o] assign[=] call[name[ChannelModeHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[o]]
if compare[name[ChannelMode_presence4] is constant[1]] begin[:]
variable[p] assign[=] call[name[ChannelModeHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[p]]
if compare[name[ChannelMode_presence5] is constant[1]] begin[:]
variable[q] assign[=] call[name[ChannelModeHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[q]]
if compare[name[ChannelMode_presence6] is constant[1]] begin[:]
variable[r] assign[=] call[name[ChannelModeHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[r]]
if compare[name[ChannelMode_presence7] is constant[1]] begin[:]
variable[s] assign[=] call[name[ChannelModeHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[s]]
if compare[name[ChannelDescription_presence1] is constant[1]] begin[:]
variable[s1] assign[=] call[name[ChannelDescriptionHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[s1]]
if compare[name[ChannelMode2_presence] is constant[1]] begin[:]
variable[t] assign[=] call[name[ChannelMode2Hdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[t]]
if compare[name[FrequencyChannelSequence_presence] is constant[1]] begin[:]
variable[u] assign[=] call[name[FrequencyChannelSequenceHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[u]]
if compare[name[MobileAllocation_presence] is constant[1]] begin[:]
variable[v] assign[=] call[name[MobileAllocationHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[v]]
if compare[name[StartingTime_presence] is constant[1]] begin[:]
variable[w] assign[=] call[name[StartingTimeHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[w]]
if compare[name[TimeDifference_presence] is constant[1]] begin[:]
variable[x] assign[=] call[name[TimeDifferenceHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[x]]
if compare[name[TimingAdvance_presence] is constant[1]] begin[:]
variable[y] assign[=] call[name[TimingAdvanceHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[y]]
if compare[name[FrequencyShortList_presence1] is constant[1]] begin[:]
variable[z] assign[=] call[name[FrequencyShortListHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[z]]
if compare[name[FrequencyList_presence1] is constant[1]] begin[:]
variable[aa] assign[=] call[name[FrequencyListHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[aa]]
if compare[name[ChannelDescription2_presence] is constant[1]] begin[:]
variable[ab] assign[=] call[name[ChannelDescription2Hdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[ab]]
if compare[name[ChannelDescription_presence2] is constant[1]] begin[:]
variable[ac] assign[=] call[name[ChannelDescriptionHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[ac]]
if compare[name[FrequencyChannelSequence_presence1] is constant[1]] begin[:]
variable[ad] assign[=] call[name[FrequencyChannelSequenceHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[ad]]
if compare[name[MobileAllocation_presence1] is constant[1]] begin[:]
variable[ae] assign[=] call[name[MobileAllocationHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[ae]]
if compare[name[CipherModeSetting_presence] is constant[1]] begin[:]
variable[af] assign[=] call[name[CipherModeSettingHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[af]]
if compare[name[VgcsTargetModeIdentication_presence] is constant[1]] begin[:]
variable[ag] assign[=] call[name[VgcsTargetModeIdenticationHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[ag]]
if compare[name[MultiRateConfiguration_presence] is constant[1]] begin[:]
variable[ah] assign[=] call[name[MultiRateConfigurationHdr], parameter[]]
variable[packet] assign[=] binary_operation[name[packet] / name[ah]]
return[name[packet]] | keyword[def] identifier[handoverCommand] ( identifier[SynchronizationIndication_presence] = literal[int] ,
identifier[FrequencyShortList_presence] = literal[int] , identifier[FrequencyList_presence] = literal[int] ,
identifier[CellChannelDescription_presence] = literal[int] ,
identifier[MultislotAllocation_presence] = literal[int] ,
identifier[ChannelMode_presence] = literal[int] , identifier[ChannelMode_presence1] = literal[int] ,
identifier[ChannelMode_presence2] = literal[int] ,
identifier[ChannelMode_presence3] = literal[int] , identifier[ChannelMode_presence4] = literal[int] ,
identifier[ChannelMode_presence5] = literal[int] ,
identifier[ChannelMode_presence6] = literal[int] , identifier[ChannelMode_presence7] = literal[int] ,
identifier[ChannelDescription_presence1] = literal[int] , identifier[ChannelMode2_presence] = literal[int] ,
identifier[FrequencyChannelSequence_presence] = literal[int] ,
identifier[MobileAllocation_presence] = literal[int] ,
identifier[StartingTime_presence] = literal[int] , identifier[TimeDifference_presence] = literal[int] ,
identifier[TimingAdvance_presence] = literal[int] ,
identifier[FrequencyShortList_presence1] = literal[int] ,
identifier[FrequencyList_presence1] = literal[int] ,
identifier[ChannelDescription2_presence] = literal[int] ,
identifier[ChannelDescription_presence2] = literal[int] ,
identifier[FrequencyChannelSequence_presence1] = literal[int] ,
identifier[MobileAllocation_presence1] = literal[int] ,
identifier[CipherModeSetting_presence] = literal[int] ,
identifier[VgcsTargetModeIdentication_presence] = literal[int] ,
identifier[MultiRateConfiguration_presence] = literal[int] ):
literal[string]
identifier[name] = literal[string]
identifier[a] = identifier[TpPd] ( identifier[pd] = literal[int] )
identifier[b] = identifier[MessageType] ( identifier[mesType] = literal[int] )
identifier[c] = identifier[CellDescription] ()
identifier[d] = identifier[ChannelDescription2] ()
identifier[e] = identifier[HandoverReference] ()
identifier[f] = identifier[PowerCommandAndAccessType] ()
identifier[packet] = identifier[a] / identifier[b] / identifier[c] / identifier[d] / identifier[e] / identifier[f]
keyword[if] identifier[SynchronizationIndication_presence] keyword[is] literal[int] :
identifier[g] = identifier[SynchronizationIndicationHdr] ( identifier[ieiSI] = literal[int] , identifier[eightBitSI] = literal[int] )
identifier[packet] = identifier[packet] / identifier[g]
keyword[if] identifier[FrequencyShortList_presence] keyword[is] literal[int] :
identifier[h] = identifier[FrequencyShortListHdr] ( identifier[ieiFSL] = literal[int] )
identifier[packet] = identifier[packet] / identifier[h]
keyword[if] identifier[FrequencyList_presence] keyword[is] literal[int] :
identifier[i] = identifier[FrequencyListHdr] ( identifier[ieiFL] = literal[int] , identifier[eightBitFL] = literal[int] )
identifier[packet] = identifier[packet] / identifier[i]
keyword[if] identifier[CellChannelDescription_presence] keyword[is] literal[int] :
identifier[j] = identifier[CellChannelDescriptionHdr] ( identifier[ieiCCD] = literal[int] , identifier[eightBitCCD] = literal[int] )
identifier[packet] = identifier[packet] / identifier[j]
keyword[if] identifier[MultislotAllocation_presence] keyword[is] literal[int] :
identifier[k] = identifier[MultislotAllocationHdr] ( identifier[ieiMSA] = literal[int] , identifier[eightBitMSA] = literal[int] )
identifier[packet] = identifier[packet] / identifier[k]
keyword[if] identifier[ChannelMode_presence] keyword[is] literal[int] :
identifier[l] = identifier[ChannelModeHdr] ( identifier[ieiCM] = literal[int] , identifier[eightBitCM] = literal[int] )
identifier[packet] = identifier[packet] / identifier[l]
keyword[if] identifier[ChannelMode_presence1] keyword[is] literal[int] :
identifier[m] = identifier[ChannelModeHdr] ( identifier[ieiCM] = literal[int] , identifier[eightBitCM] = literal[int] )
identifier[packet] = identifier[packet] / identifier[m]
keyword[if] identifier[ChannelMode_presence2] keyword[is] literal[int] :
identifier[n] = identifier[ChannelModeHdr] ( identifier[ieiCM] = literal[int] , identifier[eightBitCM] = literal[int] )
identifier[packet] = identifier[packet] / identifier[n]
keyword[if] identifier[ChannelMode_presence3] keyword[is] literal[int] :
identifier[o] = identifier[ChannelModeHdr] ( identifier[ieiCM] = literal[int] , identifier[eightBitCM] = literal[int] )
identifier[packet] = identifier[packet] / identifier[o]
keyword[if] identifier[ChannelMode_presence4] keyword[is] literal[int] :
identifier[p] = identifier[ChannelModeHdr] ( identifier[ieiCM] = literal[int] , identifier[eightBitCM] = literal[int] )
identifier[packet] = identifier[packet] / identifier[p]
keyword[if] identifier[ChannelMode_presence5] keyword[is] literal[int] :
identifier[q] = identifier[ChannelModeHdr] ( identifier[ieiCM] = literal[int] , identifier[eightBitCM] = literal[int] )
identifier[packet] = identifier[packet] / identifier[q]
keyword[if] identifier[ChannelMode_presence6] keyword[is] literal[int] :
identifier[r] = identifier[ChannelModeHdr] ( identifier[ieiCM] = literal[int] , identifier[eightBitCM] = literal[int] )
identifier[packet] = identifier[packet] / identifier[r]
keyword[if] identifier[ChannelMode_presence7] keyword[is] literal[int] :
identifier[s] = identifier[ChannelModeHdr] ( identifier[ieiCM] = literal[int] , identifier[eightBitCM] = literal[int] )
identifier[packet] = identifier[packet] / identifier[s]
keyword[if] identifier[ChannelDescription_presence1] keyword[is] literal[int] :
identifier[s1] = identifier[ChannelDescriptionHdr] ( identifier[ieiCD] = literal[int] , identifier[eightBitCD] = literal[int] )
identifier[packet] = identifier[packet] / identifier[s1]
keyword[if] identifier[ChannelMode2_presence] keyword[is] literal[int] :
identifier[t] = identifier[ChannelMode2Hdr] ( identifier[ieiCM2] = literal[int] , identifier[eightBitCM2] = literal[int] )
identifier[packet] = identifier[packet] / identifier[t]
keyword[if] identifier[FrequencyChannelSequence_presence] keyword[is] literal[int] :
identifier[u] = identifier[FrequencyChannelSequenceHdr] ( identifier[ieiFCS] = literal[int] , identifier[eightBitFCS] = literal[int] )
identifier[packet] = identifier[packet] / identifier[u]
keyword[if] identifier[MobileAllocation_presence] keyword[is] literal[int] :
identifier[v] = identifier[MobileAllocationHdr] ( identifier[ieiMA] = literal[int] , identifier[eightBitMA] = literal[int] )
identifier[packet] = identifier[packet] / identifier[v]
keyword[if] identifier[StartingTime_presence] keyword[is] literal[int] :
identifier[w] = identifier[StartingTimeHdr] ( identifier[ieiST] = literal[int] , identifier[eightBitST] = literal[int] )
identifier[packet] = identifier[packet] / identifier[w]
keyword[if] identifier[TimeDifference_presence] keyword[is] literal[int] :
identifier[x] = identifier[TimeDifferenceHdr] ( identifier[ieiTD] = literal[int] , identifier[eightBitTD] = literal[int] )
identifier[packet] = identifier[packet] / identifier[x]
keyword[if] identifier[TimingAdvance_presence] keyword[is] literal[int] :
identifier[y] = identifier[TimingAdvanceHdr] ( identifier[ieiTA] = literal[int] , identifier[eightBitTA] = literal[int] )
identifier[packet] = identifier[packet] / identifier[y]
keyword[if] identifier[FrequencyShortList_presence1] keyword[is] literal[int] :
identifier[z] = identifier[FrequencyShortListHdr] ( identifier[ieiFSL] = literal[int] )
identifier[packet] = identifier[packet] / identifier[z]
keyword[if] identifier[FrequencyList_presence1] keyword[is] literal[int] :
identifier[aa] = identifier[FrequencyListHdr] ( identifier[ieiFL] = literal[int] , identifier[eightBitFL] = literal[int] )
identifier[packet] = identifier[packet] / identifier[aa]
keyword[if] identifier[ChannelDescription2_presence] keyword[is] literal[int] :
identifier[ab] = identifier[ChannelDescription2Hdr] ( identifier[ieiCD2] = literal[int] , identifier[eightBitCD2] = literal[int] )
identifier[packet] = identifier[packet] / identifier[ab]
keyword[if] identifier[ChannelDescription_presence2] keyword[is] literal[int] :
identifier[ac] = identifier[ChannelDescriptionHdr] ( identifier[ieiCD] = literal[int] , identifier[eightBitCD] = literal[int] )
identifier[packet] = identifier[packet] / identifier[ac]
keyword[if] identifier[FrequencyChannelSequence_presence1] keyword[is] literal[int] :
identifier[ad] = identifier[FrequencyChannelSequenceHdr] ( identifier[ieiFCS] = literal[int] , identifier[eightBitFCS] = literal[int] )
identifier[packet] = identifier[packet] / identifier[ad]
keyword[if] identifier[MobileAllocation_presence1] keyword[is] literal[int] :
identifier[ae] = identifier[MobileAllocationHdr] ( identifier[ieiMA] = literal[int] , identifier[eightBitMA] = literal[int] )
identifier[packet] = identifier[packet] / identifier[ae]
keyword[if] identifier[CipherModeSetting_presence] keyword[is] literal[int] :
identifier[af] = identifier[CipherModeSettingHdr] ( identifier[ieiCMS] = literal[int] , identifier[eightBitCMS] = literal[int] )
identifier[packet] = identifier[packet] / identifier[af]
keyword[if] identifier[VgcsTargetModeIdentication_presence] keyword[is] literal[int] :
identifier[ag] = identifier[VgcsTargetModeIdenticationHdr] ( identifier[ieiVTMI] = literal[int] , identifier[eightBitVTMI] = literal[int] )
identifier[packet] = identifier[packet] / identifier[ag]
keyword[if] identifier[MultiRateConfiguration_presence] keyword[is] literal[int] :
identifier[ah] = identifier[MultiRateConfigurationHdr] ( identifier[ieiMRC] = literal[int] , identifier[eightBitMRC] = literal[int] )
identifier[packet] = identifier[packet] / identifier[ah]
keyword[return] identifier[packet] | def handoverCommand(SynchronizationIndication_presence=0, FrequencyShortList_presence=0, FrequencyList_presence=0, CellChannelDescription_presence=0, MultislotAllocation_presence=0, ChannelMode_presence=0, ChannelMode_presence1=0, ChannelMode_presence2=0, ChannelMode_presence3=0, ChannelMode_presence4=0, ChannelMode_presence5=0, ChannelMode_presence6=0, ChannelMode_presence7=0, ChannelDescription_presence1=0, ChannelMode2_presence=0, FrequencyChannelSequence_presence=0, MobileAllocation_presence=0, StartingTime_presence=0, TimeDifference_presence=0, TimingAdvance_presence=0, FrequencyShortList_presence1=0, FrequencyList_presence1=0, ChannelDescription2_presence=0, ChannelDescription_presence2=0, FrequencyChannelSequence_presence1=0, MobileAllocation_presence1=0, CipherModeSetting_presence=0, VgcsTargetModeIdentication_presence=0, MultiRateConfiguration_presence=0):
"""HANDOVER COMMAND Section 9.1.15"""
name = 'Handover Command'
a = TpPd(pd=6)
b = MessageType(mesType=43) # 00101011
c = CellDescription()
d = ChannelDescription2()
e = HandoverReference()
f = PowerCommandAndAccessType()
packet = a / b / c / d / e / f
if SynchronizationIndication_presence is 1:
g = SynchronizationIndicationHdr(ieiSI=13, eightBitSI=0)
packet = packet / g # depends on [control=['if'], data=[]]
if FrequencyShortList_presence is 1:
h = FrequencyShortListHdr(ieiFSL=2)
packet = packet / h # depends on [control=['if'], data=[]]
if FrequencyList_presence is 1:
i = FrequencyListHdr(ieiFL=5, eightBitFL=0)
packet = packet / i # depends on [control=['if'], data=[]]
if CellChannelDescription_presence is 1:
j = CellChannelDescriptionHdr(ieiCCD=98, eightBitCCD=0)
packet = packet / j # depends on [control=['if'], data=[]]
if MultislotAllocation_presence is 1:
k = MultislotAllocationHdr(ieiMSA=16, eightBitMSA=0)
packet = packet / k # depends on [control=['if'], data=[]]
if ChannelMode_presence is 1:
l = ChannelModeHdr(ieiCM=99, eightBitCM=0)
packet = packet / l # depends on [control=['if'], data=[]]
if ChannelMode_presence1 is 1:
m = ChannelModeHdr(ieiCM=17, eightBitCM=0)
packet = packet / m # depends on [control=['if'], data=[]]
if ChannelMode_presence2 is 1:
n = ChannelModeHdr(ieiCM=19, eightBitCM=0)
packet = packet / n # depends on [control=['if'], data=[]]
if ChannelMode_presence3 is 1:
o = ChannelModeHdr(ieiCM=20, eightBitCM=0)
packet = packet / o # depends on [control=['if'], data=[]]
if ChannelMode_presence4 is 1:
p = ChannelModeHdr(ieiCM=21, eightBitCM=0)
packet = packet / p # depends on [control=['if'], data=[]]
if ChannelMode_presence5 is 1:
q = ChannelModeHdr(ieiCM=22, eightBitCM=0)
packet = packet / q # depends on [control=['if'], data=[]]
if ChannelMode_presence6 is 1:
r = ChannelModeHdr(ieiCM=23, eightBitCM=0)
packet = packet / r # depends on [control=['if'], data=[]]
if ChannelMode_presence7 is 1:
s = ChannelModeHdr(ieiCM=24, eightBitCM=0)
packet = packet / s # depends on [control=['if'], data=[]]
if ChannelDescription_presence1 is 1:
s1 = ChannelDescriptionHdr(ieiCD=100, eightBitCD=0)
packet = packet / s1 # depends on [control=['if'], data=[]]
if ChannelMode2_presence is 1:
t = ChannelMode2Hdr(ieiCM2=102, eightBitCM2=0)
packet = packet / t # depends on [control=['if'], data=[]]
if FrequencyChannelSequence_presence is 1:
u = FrequencyChannelSequenceHdr(ieiFCS=105, eightBitFCS=0)
packet = packet / u # depends on [control=['if'], data=[]]
if MobileAllocation_presence is 1:
v = MobileAllocationHdr(ieiMA=114, eightBitMA=0)
packet = packet / v # depends on [control=['if'], data=[]]
if StartingTime_presence is 1:
w = StartingTimeHdr(ieiST=124, eightBitST=0)
packet = packet / w # depends on [control=['if'], data=[]]
if TimeDifference_presence is 1:
x = TimeDifferenceHdr(ieiTD=123, eightBitTD=0)
packet = packet / x # depends on [control=['if'], data=[]]
if TimingAdvance_presence is 1:
y = TimingAdvanceHdr(ieiTA=125, eightBitTA=0)
packet = packet / y # depends on [control=['if'], data=[]]
if FrequencyShortList_presence1 is 1:
z = FrequencyShortListHdr(ieiFSL=18)
packet = packet / z # depends on [control=['if'], data=[]]
if FrequencyList_presence1 is 1:
aa = FrequencyListHdr(ieiFL=25, eightBitFL=0)
packet = packet / aa # depends on [control=['if'], data=[]]
if ChannelDescription2_presence is 1:
ab = ChannelDescription2Hdr(ieiCD2=28, eightBitCD2=0)
packet = packet / ab # depends on [control=['if'], data=[]]
if ChannelDescription_presence2 is 1:
ac = ChannelDescriptionHdr(ieiCD=29, eightBitCD=0)
packet = packet / ac # depends on [control=['if'], data=[]]
if FrequencyChannelSequence_presence1 is 1:
ad = FrequencyChannelSequenceHdr(ieiFCS=30, eightBitFCS=0)
packet = packet / ad # depends on [control=['if'], data=[]]
if MobileAllocation_presence1 is 1:
ae = MobileAllocationHdr(ieiMA=33, eightBitMA=0)
packet = packet / ae # depends on [control=['if'], data=[]]
if CipherModeSetting_presence is 1:
af = CipherModeSettingHdr(ieiCMS=9, eightBitCMS=0)
packet = packet / af # depends on [control=['if'], data=[]]
if VgcsTargetModeIdentication_presence is 1:
ag = VgcsTargetModeIdenticationHdr(ieiVTMI=1, eightBitVTMI=0)
packet = packet / ag # depends on [control=['if'], data=[]]
if MultiRateConfiguration_presence is 1:
ah = MultiRateConfigurationHdr(ieiMRC=3, eightBitMRC=0)
packet = packet / ah # depends on [control=['if'], data=[]]
return packet |
def energy_ratio_by_chunks(x, param):
"""
Calculates the sum of squares of chunk i out of N chunks expressed as a ratio with the sum of squares over the whole
series.
Takes as input parameters the number num_segments of segments to divide the series into and segment_focus
which is the segment number (starting at zero) to return a feature on.
If the length of the time series is not a multiple of the number of segments, the remaining data points are
distributed on the bins starting from the first. For example, if your time series consists of 8 entries, the
first two bins will contain 3 and the last two values, e.g. `[ 0., 1., 2.], [ 3., 4., 5.]` and `[ 6., 7.]`.
Note that the answer for `num_segments = 1` is a trivial "1" but we handle this scenario
in case somebody calls it. Sum of the ratios should be 1.0.
:param x: the time series to calculate the feature of
:type x: numpy.ndarray
:param param: contains dictionaries {"num_segments": N, "segment_focus": i} with N, i both ints
:return: the feature values
:return type: list of tuples (index, data)
"""
res_data = []
res_index = []
full_series_energy = np.sum(x ** 2)
for parameter_combination in param:
num_segments = parameter_combination["num_segments"]
segment_focus = parameter_combination["segment_focus"]
assert segment_focus < num_segments
assert num_segments > 0
res_data.append(np.sum(np.array_split(x, num_segments)[segment_focus] ** 2.0)/full_series_energy)
res_index.append("num_segments_{}__segment_focus_{}".format(num_segments, segment_focus))
return list(zip(res_index, res_data)) | def function[energy_ratio_by_chunks, parameter[x, param]]:
constant[
Calculates the sum of squares of chunk i out of N chunks expressed as a ratio with the sum of squares over the whole
series.
Takes as input parameters the number num_segments of segments to divide the series into and segment_focus
which is the segment number (starting at zero) to return a feature on.
If the length of the time series is not a multiple of the number of segments, the remaining data points are
distributed on the bins starting from the first. For example, if your time series consists of 8 entries, the
first two bins will contain 3 and the last two values, e.g. `[ 0., 1., 2.], [ 3., 4., 5.]` and `[ 6., 7.]`.
Note that the answer for `num_segments = 1` is a trivial "1" but we handle this scenario
in case somebody calls it. Sum of the ratios should be 1.0.
:param x: the time series to calculate the feature of
:type x: numpy.ndarray
:param param: contains dictionaries {"num_segments": N, "segment_focus": i} with N, i both ints
:return: the feature values
:return type: list of tuples (index, data)
]
variable[res_data] assign[=] list[[]]
variable[res_index] assign[=] list[[]]
variable[full_series_energy] assign[=] call[name[np].sum, parameter[binary_operation[name[x] ** constant[2]]]]
for taget[name[parameter_combination]] in starred[name[param]] begin[:]
variable[num_segments] assign[=] call[name[parameter_combination]][constant[num_segments]]
variable[segment_focus] assign[=] call[name[parameter_combination]][constant[segment_focus]]
assert[compare[name[segment_focus] less[<] name[num_segments]]]
assert[compare[name[num_segments] greater[>] constant[0]]]
call[name[res_data].append, parameter[binary_operation[call[name[np].sum, parameter[binary_operation[call[call[name[np].array_split, parameter[name[x], name[num_segments]]]][name[segment_focus]] ** constant[2.0]]]] / name[full_series_energy]]]]
call[name[res_index].append, parameter[call[constant[num_segments_{}__segment_focus_{}].format, parameter[name[num_segments], name[segment_focus]]]]]
return[call[name[list], parameter[call[name[zip], parameter[name[res_index], name[res_data]]]]]] | keyword[def] identifier[energy_ratio_by_chunks] ( identifier[x] , identifier[param] ):
literal[string]
identifier[res_data] =[]
identifier[res_index] =[]
identifier[full_series_energy] = identifier[np] . identifier[sum] ( identifier[x] ** literal[int] )
keyword[for] identifier[parameter_combination] keyword[in] identifier[param] :
identifier[num_segments] = identifier[parameter_combination] [ literal[string] ]
identifier[segment_focus] = identifier[parameter_combination] [ literal[string] ]
keyword[assert] identifier[segment_focus] < identifier[num_segments]
keyword[assert] identifier[num_segments] > literal[int]
identifier[res_data] . identifier[append] ( identifier[np] . identifier[sum] ( identifier[np] . identifier[array_split] ( identifier[x] , identifier[num_segments] )[ identifier[segment_focus] ]** literal[int] )/ identifier[full_series_energy] )
identifier[res_index] . identifier[append] ( literal[string] . identifier[format] ( identifier[num_segments] , identifier[segment_focus] ))
keyword[return] identifier[list] ( identifier[zip] ( identifier[res_index] , identifier[res_data] )) | def energy_ratio_by_chunks(x, param):
"""
Calculates the sum of squares of chunk i out of N chunks expressed as a ratio with the sum of squares over the whole
series.
Takes as input parameters the number num_segments of segments to divide the series into and segment_focus
which is the segment number (starting at zero) to return a feature on.
If the length of the time series is not a multiple of the number of segments, the remaining data points are
distributed on the bins starting from the first. For example, if your time series consists of 8 entries, the
first two bins will contain 3 and the last two values, e.g. `[ 0., 1., 2.], [ 3., 4., 5.]` and `[ 6., 7.]`.
Note that the answer for `num_segments = 1` is a trivial "1" but we handle this scenario
in case somebody calls it. Sum of the ratios should be 1.0.
:param x: the time series to calculate the feature of
:type x: numpy.ndarray
:param param: contains dictionaries {"num_segments": N, "segment_focus": i} with N, i both ints
:return: the feature values
:return type: list of tuples (index, data)
"""
res_data = []
res_index = []
full_series_energy = np.sum(x ** 2)
for parameter_combination in param:
num_segments = parameter_combination['num_segments']
segment_focus = parameter_combination['segment_focus']
assert segment_focus < num_segments
assert num_segments > 0
res_data.append(np.sum(np.array_split(x, num_segments)[segment_focus] ** 2.0) / full_series_energy)
res_index.append('num_segments_{}__segment_focus_{}'.format(num_segments, segment_focus)) # depends on [control=['for'], data=['parameter_combination']]
return list(zip(res_index, res_data)) |
def calculate_integral(self, T1, T2):
r'''Method to compute the enthalpy integral of heat capacity from
`T1` to `T2`. Analytically integrates across the piecewise spline
as necessary.
Parameters
----------
T1 : float
Initial temperature, [K]
T2 : float
Final temperature, [K]
Returns
-------
dS : float
Enthalpy difference between `T1` and `T2`, [J/mol/K]
'''
# Simplify the problem so we can assume T2 >= T1
if T2 < T1:
flipped = True
T1, T2 = T2, T1
else:
flipped = False
# Fastest case - only one coefficient set, occurs surprisingly often
if self.n == 1:
dH = (Zabransky_cubic_integral(T2, *self.coeff_sets[0])
- Zabransky_cubic_integral(T1, *self.coeff_sets[0]))
else:
ind_T1, ind_T2 = self._coeff_ind_from_T(T1), self._coeff_ind_from_T(T2)
# Second fastest case - both are in the same coefficient set
if ind_T1 == ind_T2:
dH = (Zabransky_cubic_integral(T2, *self.coeff_sets[ind_T2])
- Zabransky_cubic_integral(T1, *self.coeff_sets[ind_T1]))
# Fo through the loop if we need to - inevitably slow
else:
dH = (Zabransky_cubic_integral(self.Ts[ind_T1], *self.coeff_sets[ind_T1])
- Zabransky_cubic_integral(T1, *self.coeff_sets[ind_T1]))
for i in range(ind_T1, ind_T2):
diff =(Zabransky_cubic_integral(self.Ts[i+1], *self.coeff_sets[i])
- Zabransky_cubic_integral(self.Ts[i], *self.coeff_sets[i]))
dH += diff
end = (Zabransky_cubic_integral(T2, *self.coeff_sets[ind_T2])
- Zabransky_cubic_integral(self.Ts[ind_T2], *self.coeff_sets[ind_T2]))
dH += end
return -dH if flipped else dH | def function[calculate_integral, parameter[self, T1, T2]]:
constant[Method to compute the enthalpy integral of heat capacity from
`T1` to `T2`. Analytically integrates across the piecewise spline
as necessary.
Parameters
----------
T1 : float
Initial temperature, [K]
T2 : float
Final temperature, [K]
Returns
-------
dS : float
Enthalpy difference between `T1` and `T2`, [J/mol/K]
]
if compare[name[T2] less[<] name[T1]] begin[:]
variable[flipped] assign[=] constant[True]
<ast.Tuple object at 0x7da1b021dc00> assign[=] tuple[[<ast.Name object at 0x7da1b021ed40>, <ast.Name object at 0x7da1b021f1c0>]]
if compare[name[self].n equal[==] constant[1]] begin[:]
variable[dH] assign[=] binary_operation[call[name[Zabransky_cubic_integral], parameter[name[T2], <ast.Starred object at 0x7da1b021f610>]] - call[name[Zabransky_cubic_integral], parameter[name[T1], <ast.Starred object at 0x7da1b021ebf0>]]]
return[<ast.IfExp object at 0x7da20c76ebc0>] | keyword[def] identifier[calculate_integral] ( identifier[self] , identifier[T1] , identifier[T2] ):
literal[string]
keyword[if] identifier[T2] < identifier[T1] :
identifier[flipped] = keyword[True]
identifier[T1] , identifier[T2] = identifier[T2] , identifier[T1]
keyword[else] :
identifier[flipped] = keyword[False]
keyword[if] identifier[self] . identifier[n] == literal[int] :
identifier[dH] =( identifier[Zabransky_cubic_integral] ( identifier[T2] ,* identifier[self] . identifier[coeff_sets] [ literal[int] ])
- identifier[Zabransky_cubic_integral] ( identifier[T1] ,* identifier[self] . identifier[coeff_sets] [ literal[int] ]))
keyword[else] :
identifier[ind_T1] , identifier[ind_T2] = identifier[self] . identifier[_coeff_ind_from_T] ( identifier[T1] ), identifier[self] . identifier[_coeff_ind_from_T] ( identifier[T2] )
keyword[if] identifier[ind_T1] == identifier[ind_T2] :
identifier[dH] =( identifier[Zabransky_cubic_integral] ( identifier[T2] ,* identifier[self] . identifier[coeff_sets] [ identifier[ind_T2] ])
- identifier[Zabransky_cubic_integral] ( identifier[T1] ,* identifier[self] . identifier[coeff_sets] [ identifier[ind_T1] ]))
keyword[else] :
identifier[dH] =( identifier[Zabransky_cubic_integral] ( identifier[self] . identifier[Ts] [ identifier[ind_T1] ],* identifier[self] . identifier[coeff_sets] [ identifier[ind_T1] ])
- identifier[Zabransky_cubic_integral] ( identifier[T1] ,* identifier[self] . identifier[coeff_sets] [ identifier[ind_T1] ]))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[ind_T1] , identifier[ind_T2] ):
identifier[diff] =( identifier[Zabransky_cubic_integral] ( identifier[self] . identifier[Ts] [ identifier[i] + literal[int] ],* identifier[self] . identifier[coeff_sets] [ identifier[i] ])
- identifier[Zabransky_cubic_integral] ( identifier[self] . identifier[Ts] [ identifier[i] ],* identifier[self] . identifier[coeff_sets] [ identifier[i] ]))
identifier[dH] += identifier[diff]
identifier[end] =( identifier[Zabransky_cubic_integral] ( identifier[T2] ,* identifier[self] . identifier[coeff_sets] [ identifier[ind_T2] ])
- identifier[Zabransky_cubic_integral] ( identifier[self] . identifier[Ts] [ identifier[ind_T2] ],* identifier[self] . identifier[coeff_sets] [ identifier[ind_T2] ]))
identifier[dH] += identifier[end]
keyword[return] - identifier[dH] keyword[if] identifier[flipped] keyword[else] identifier[dH] | def calculate_integral(self, T1, T2):
"""Method to compute the enthalpy integral of heat capacity from
`T1` to `T2`. Analytically integrates across the piecewise spline
as necessary.
Parameters
----------
T1 : float
Initial temperature, [K]
T2 : float
Final temperature, [K]
Returns
-------
dS : float
Enthalpy difference between `T1` and `T2`, [J/mol/K]
"""
# Simplify the problem so we can assume T2 >= T1
if T2 < T1:
flipped = True
(T1, T2) = (T2, T1) # depends on [control=['if'], data=['T2', 'T1']]
else:
flipped = False
# Fastest case - only one coefficient set, occurs surprisingly often
if self.n == 1:
dH = Zabransky_cubic_integral(T2, *self.coeff_sets[0]) - Zabransky_cubic_integral(T1, *self.coeff_sets[0]) # depends on [control=['if'], data=[]]
else:
(ind_T1, ind_T2) = (self._coeff_ind_from_T(T1), self._coeff_ind_from_T(T2))
# Second fastest case - both are in the same coefficient set
if ind_T1 == ind_T2:
dH = Zabransky_cubic_integral(T2, *self.coeff_sets[ind_T2]) - Zabransky_cubic_integral(T1, *self.coeff_sets[ind_T1]) # depends on [control=['if'], data=['ind_T1', 'ind_T2']]
else: # Fo through the loop if we need to - inevitably slow
dH = Zabransky_cubic_integral(self.Ts[ind_T1], *self.coeff_sets[ind_T1]) - Zabransky_cubic_integral(T1, *self.coeff_sets[ind_T1])
for i in range(ind_T1, ind_T2):
diff = Zabransky_cubic_integral(self.Ts[i + 1], *self.coeff_sets[i]) - Zabransky_cubic_integral(self.Ts[i], *self.coeff_sets[i])
dH += diff # depends on [control=['for'], data=['i']]
end = Zabransky_cubic_integral(T2, *self.coeff_sets[ind_T2]) - Zabransky_cubic_integral(self.Ts[ind_T2], *self.coeff_sets[ind_T2])
dH += end
return -dH if flipped else dH |
async def delete_group_memory(self, memory_id):
"""Delete group memory."""
act = self.service.action("X_DeleteGroupMemory")
res = await act.async_call(MemoryID=memory_id) | <ast.AsyncFunctionDef object at 0x7da18dc07e80> | keyword[async] keyword[def] identifier[delete_group_memory] ( identifier[self] , identifier[memory_id] ):
literal[string]
identifier[act] = identifier[self] . identifier[service] . identifier[action] ( literal[string] )
identifier[res] = keyword[await] identifier[act] . identifier[async_call] ( identifier[MemoryID] = identifier[memory_id] ) | async def delete_group_memory(self, memory_id):
"""Delete group memory."""
act = self.service.action('X_DeleteGroupMemory')
res = await act.async_call(MemoryID=memory_id) |
def buildcontent(self):
"""Build HTML content only, no header or body tags. To be useful this
will usually require the attribute `juqery_on_ready` to be set which
will wrap the js in $(function(){<regular_js>};)
"""
self.buildcontainer()
# if the subclass has a method buildjs this method will be
# called instead of the method defined here
# when this subclass method is entered it does call
# the method buildjschart defined here
self.buildjschart()
self.htmlcontent = self.template_chart_nvd3.render(chart=self) | def function[buildcontent, parameter[self]]:
constant[Build HTML content only, no header or body tags. To be useful this
will usually require the attribute `juqery_on_ready` to be set which
will wrap the js in $(function(){<regular_js>};)
]
call[name[self].buildcontainer, parameter[]]
call[name[self].buildjschart, parameter[]]
name[self].htmlcontent assign[=] call[name[self].template_chart_nvd3.render, parameter[]] | keyword[def] identifier[buildcontent] ( identifier[self] ):
literal[string]
identifier[self] . identifier[buildcontainer] ()
identifier[self] . identifier[buildjschart] ()
identifier[self] . identifier[htmlcontent] = identifier[self] . identifier[template_chart_nvd3] . identifier[render] ( identifier[chart] = identifier[self] ) | def buildcontent(self):
"""Build HTML content only, no header or body tags. To be useful this
will usually require the attribute `juqery_on_ready` to be set which
will wrap the js in $(function(){<regular_js>};)
"""
self.buildcontainer()
# if the subclass has a method buildjs this method will be
# called instead of the method defined here
# when this subclass method is entered it does call
# the method buildjschart defined here
self.buildjschart()
self.htmlcontent = self.template_chart_nvd3.render(chart=self) |
def get_stream_handle(stream=sys.stdout):
"""
Get the OS appropriate handle for the corresponding output stream.
:param str stream: The the stream to get the handle for
:return: A handle to the appropriate stream, either a ctypes buffer
or **sys.stdout** or **sys.stderr**.
"""
handle = stream
if os.name == "nt":
from ._winconsole import get_stream_handle as get_win_stream_handle
return get_win_stream_handle(stream)
return handle | def function[get_stream_handle, parameter[stream]]:
constant[
Get the OS appropriate handle for the corresponding output stream.
:param str stream: The the stream to get the handle for
:return: A handle to the appropriate stream, either a ctypes buffer
or **sys.stdout** or **sys.stderr**.
]
variable[handle] assign[=] name[stream]
if compare[name[os].name equal[==] constant[nt]] begin[:]
from relative_module[_winconsole] import module[get_stream_handle]
return[call[name[get_win_stream_handle], parameter[name[stream]]]]
return[name[handle]] | keyword[def] identifier[get_stream_handle] ( identifier[stream] = identifier[sys] . identifier[stdout] ):
literal[string]
identifier[handle] = identifier[stream]
keyword[if] identifier[os] . identifier[name] == literal[string] :
keyword[from] . identifier[_winconsole] keyword[import] identifier[get_stream_handle] keyword[as] identifier[get_win_stream_handle]
keyword[return] identifier[get_win_stream_handle] ( identifier[stream] )
keyword[return] identifier[handle] | def get_stream_handle(stream=sys.stdout):
"""
Get the OS appropriate handle for the corresponding output stream.
:param str stream: The the stream to get the handle for
:return: A handle to the appropriate stream, either a ctypes buffer
or **sys.stdout** or **sys.stderr**.
"""
handle = stream
if os.name == 'nt':
from ._winconsole import get_stream_handle as get_win_stream_handle
return get_win_stream_handle(stream) # depends on [control=['if'], data=[]]
return handle |
def autoExpand(self, level=None):
"""
Returns whether or not to expand for the inputed level.
:param level | <int> || None
:return <bool>
"""
return self._autoExpand.get(level, self._autoExpand.get(None, False)) | def function[autoExpand, parameter[self, level]]:
constant[
Returns whether or not to expand for the inputed level.
:param level | <int> || None
:return <bool>
]
return[call[name[self]._autoExpand.get, parameter[name[level], call[name[self]._autoExpand.get, parameter[constant[None], constant[False]]]]]] | keyword[def] identifier[autoExpand] ( identifier[self] , identifier[level] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_autoExpand] . identifier[get] ( identifier[level] , identifier[self] . identifier[_autoExpand] . identifier[get] ( keyword[None] , keyword[False] )) | def autoExpand(self, level=None):
"""
Returns whether or not to expand for the inputed level.
:param level | <int> || None
:return <bool>
"""
return self._autoExpand.get(level, self._autoExpand.get(None, False)) |
def cli(ctx):
""" This is a command line app to get useful stats from a trello board
and report on them in useful ways.
Requires the following environment varilables:
TRELLOSTATS_APP_KEY=<your key here>
TRELLOSTATS_APP_TOKEN=<your token here>
"""
ctx.obj = dict()
ctx.obj['app_key'] = os.environ.get('TRELLOSTATS_APP_KEY')
ctx.obj['app_token'] = os.environ.get('TRELLOSTATS_APP_TOKEN')
init_db(db_proxy) | def function[cli, parameter[ctx]]:
constant[ This is a command line app to get useful stats from a trello board
and report on them in useful ways.
Requires the following environment varilables:
TRELLOSTATS_APP_KEY=<your key here>
TRELLOSTATS_APP_TOKEN=<your token here>
]
name[ctx].obj assign[=] call[name[dict], parameter[]]
call[name[ctx].obj][constant[app_key]] assign[=] call[name[os].environ.get, parameter[constant[TRELLOSTATS_APP_KEY]]]
call[name[ctx].obj][constant[app_token]] assign[=] call[name[os].environ.get, parameter[constant[TRELLOSTATS_APP_TOKEN]]]
call[name[init_db], parameter[name[db_proxy]]] | keyword[def] identifier[cli] ( identifier[ctx] ):
literal[string]
identifier[ctx] . identifier[obj] = identifier[dict] ()
identifier[ctx] . identifier[obj] [ literal[string] ]= identifier[os] . identifier[environ] . identifier[get] ( literal[string] )
identifier[ctx] . identifier[obj] [ literal[string] ]= identifier[os] . identifier[environ] . identifier[get] ( literal[string] )
identifier[init_db] ( identifier[db_proxy] ) | def cli(ctx):
""" This is a command line app to get useful stats from a trello board
and report on them in useful ways.
Requires the following environment varilables:
TRELLOSTATS_APP_KEY=<your key here>
TRELLOSTATS_APP_TOKEN=<your token here>
"""
ctx.obj = dict()
ctx.obj['app_key'] = os.environ.get('TRELLOSTATS_APP_KEY')
ctx.obj['app_token'] = os.environ.get('TRELLOSTATS_APP_TOKEN')
init_db(db_proxy) |
def write_base (self, url_data):
"""Write url_data.base_ref."""
self.write(self.part("base") + self.spaces("base"))
self.writeln(url_data.base_ref, color=self.colorbase) | def function[write_base, parameter[self, url_data]]:
constant[Write url_data.base_ref.]
call[name[self].write, parameter[binary_operation[call[name[self].part, parameter[constant[base]]] + call[name[self].spaces, parameter[constant[base]]]]]]
call[name[self].writeln, parameter[name[url_data].base_ref]] | keyword[def] identifier[write_base] ( identifier[self] , identifier[url_data] ):
literal[string]
identifier[self] . identifier[write] ( identifier[self] . identifier[part] ( literal[string] )+ identifier[self] . identifier[spaces] ( literal[string] ))
identifier[self] . identifier[writeln] ( identifier[url_data] . identifier[base_ref] , identifier[color] = identifier[self] . identifier[colorbase] ) | def write_base(self, url_data):
"""Write url_data.base_ref."""
self.write(self.part('base') + self.spaces('base'))
self.writeln(url_data.base_ref, color=self.colorbase) |
def CEscape(text, as_utf8):
"""Escape a bytes string for use in an ascii protocol buffer.
text.encode('string_escape') does not seem to satisfy our needs as it
encodes unprintable characters using two-digit hex escapes whereas our
C++ unescaping function allows hex escapes to be any length. So,
"\0011".encode('string_escape') ends up being "\\x011", which will be
decoded in C++ as a single-character string with char code 0x11.
Args:
text: A byte string to be escaped
as_utf8: Specifies if result should be returned in UTF-8 encoding
Returns:
Escaped string
"""
# PY3 hack: make Ord work for str and bytes:
# //platforms/networking/data uses unicode here, hence basestring.
Ord = ord if isinstance(text, six.string_types) else lambda x: x
if as_utf8:
return ''.join(_cescape_utf8_to_str[Ord(c)] for c in text)
return ''.join(_cescape_byte_to_str[Ord(c)] for c in text) | def function[CEscape, parameter[text, as_utf8]]:
constant[Escape a bytes string for use in an ascii protocol buffer.
text.encode('string_escape') does not seem to satisfy our needs as it
encodes unprintable characters using two-digit hex escapes whereas our
C++ unescaping function allows hex escapes to be any length. So,
"1".encode('string_escape') ends up being "\x011", which will be
decoded in C++ as a single-character string with char code 0x11.
Args:
text: A byte string to be escaped
as_utf8: Specifies if result should be returned in UTF-8 encoding
Returns:
Escaped string
]
variable[Ord] assign[=] <ast.IfExp object at 0x7da1b21d6590>
if name[as_utf8] begin[:]
return[call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b21d6290>]]]
return[call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b21d6020>]]] | keyword[def] identifier[CEscape] ( identifier[text] , identifier[as_utf8] ):
literal[string]
identifier[Ord] = identifier[ord] keyword[if] identifier[isinstance] ( identifier[text] , identifier[six] . identifier[string_types] ) keyword[else] keyword[lambda] identifier[x] : identifier[x]
keyword[if] identifier[as_utf8] :
keyword[return] literal[string] . identifier[join] ( identifier[_cescape_utf8_to_str] [ identifier[Ord] ( identifier[c] )] keyword[for] identifier[c] keyword[in] identifier[text] )
keyword[return] literal[string] . identifier[join] ( identifier[_cescape_byte_to_str] [ identifier[Ord] ( identifier[c] )] keyword[for] identifier[c] keyword[in] identifier[text] ) | def CEscape(text, as_utf8):
"""Escape a bytes string for use in an ascii protocol buffer.
text.encode('string_escape') does not seem to satisfy our needs as it
encodes unprintable characters using two-digit hex escapes whereas our
C++ unescaping function allows hex escapes to be any length. So,
"\x011".encode('string_escape') ends up being "\\x011", which will be
decoded in C++ as a single-character string with char code 0x11.
Args:
text: A byte string to be escaped
as_utf8: Specifies if result should be returned in UTF-8 encoding
Returns:
Escaped string
"""
# PY3 hack: make Ord work for str and bytes:
# //platforms/networking/data uses unicode here, hence basestring.
Ord = ord if isinstance(text, six.string_types) else lambda x: x
if as_utf8:
return ''.join((_cescape_utf8_to_str[Ord(c)] for c in text)) # depends on [control=['if'], data=[]]
return ''.join((_cescape_byte_to_str[Ord(c)] for c in text)) |
def VERSION(self):
"""TAN mechanism version"""
return int(re.match(r'^(\D+)(\d+)$', self.__class__.__name__).group(2)) | def function[VERSION, parameter[self]]:
constant[TAN mechanism version]
return[call[name[int], parameter[call[call[name[re].match, parameter[constant[^(\D+)(\d+)$], name[self].__class__.__name__]].group, parameter[constant[2]]]]]] | keyword[def] identifier[VERSION] ( identifier[self] ):
literal[string]
keyword[return] identifier[int] ( identifier[re] . identifier[match] ( literal[string] , identifier[self] . identifier[__class__] . identifier[__name__] ). identifier[group] ( literal[int] )) | def VERSION(self):
"""TAN mechanism version"""
return int(re.match('^(\\D+)(\\d+)$', self.__class__.__name__).group(2)) |
def _key_values(self, sn: "SequenceNode") -> Union[EntryKeys, EntryValue]:
"""Parse leaf-list value or list keys."""
try:
keys = self.up_to("/")
except EndOfInput:
keys = self.remaining()
if not keys:
raise UnexpectedInput(self, "entry value or keys")
if isinstance(sn, LeafListNode):
return EntryValue(unquote(keys))
ks = keys.split(",")
try:
if len(ks) != len(sn.keys):
raise UnexpectedInput(self, f"exactly {len(sn.keys)} keys")
except AttributeError:
raise BadSchemaNodeType(sn.qual_name, "list")
sel = {}
for j in range(len(ks)):
knod = sn.get_data_child(*sn.keys[j])
val = unquote(ks[j])
sel[(knod.name, None if knod.ns == sn.ns else knod.ns)] = val
return EntryKeys(sel) | def function[_key_values, parameter[self, sn]]:
constant[Parse leaf-list value or list keys.]
<ast.Try object at 0x7da1b02e6200>
if <ast.UnaryOp object at 0x7da1b02e5330> begin[:]
<ast.Raise object at 0x7da1b02e50c0>
if call[name[isinstance], parameter[name[sn], name[LeafListNode]]] begin[:]
return[call[name[EntryValue], parameter[call[name[unquote], parameter[name[keys]]]]]]
variable[ks] assign[=] call[name[keys].split, parameter[constant[,]]]
<ast.Try object at 0x7da1b02e4f40>
variable[sel] assign[=] dictionary[[], []]
for taget[name[j]] in starred[call[name[range], parameter[call[name[len], parameter[name[ks]]]]]] begin[:]
variable[knod] assign[=] call[name[sn].get_data_child, parameter[<ast.Starred object at 0x7da1b04e15d0>]]
variable[val] assign[=] call[name[unquote], parameter[call[name[ks]][name[j]]]]
call[name[sel]][tuple[[<ast.Attribute object at 0x7da1b04e2800>, <ast.IfExp object at 0x7da1b04e1960>]]] assign[=] name[val]
return[call[name[EntryKeys], parameter[name[sel]]]] | keyword[def] identifier[_key_values] ( identifier[self] , identifier[sn] : literal[string] )-> identifier[Union] [ identifier[EntryKeys] , identifier[EntryValue] ]:
literal[string]
keyword[try] :
identifier[keys] = identifier[self] . identifier[up_to] ( literal[string] )
keyword[except] identifier[EndOfInput] :
identifier[keys] = identifier[self] . identifier[remaining] ()
keyword[if] keyword[not] identifier[keys] :
keyword[raise] identifier[UnexpectedInput] ( identifier[self] , literal[string] )
keyword[if] identifier[isinstance] ( identifier[sn] , identifier[LeafListNode] ):
keyword[return] identifier[EntryValue] ( identifier[unquote] ( identifier[keys] ))
identifier[ks] = identifier[keys] . identifier[split] ( literal[string] )
keyword[try] :
keyword[if] identifier[len] ( identifier[ks] )!= identifier[len] ( identifier[sn] . identifier[keys] ):
keyword[raise] identifier[UnexpectedInput] ( identifier[self] , literal[string] )
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[BadSchemaNodeType] ( identifier[sn] . identifier[qual_name] , literal[string] )
identifier[sel] ={}
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[ks] )):
identifier[knod] = identifier[sn] . identifier[get_data_child] (* identifier[sn] . identifier[keys] [ identifier[j] ])
identifier[val] = identifier[unquote] ( identifier[ks] [ identifier[j] ])
identifier[sel] [( identifier[knod] . identifier[name] , keyword[None] keyword[if] identifier[knod] . identifier[ns] == identifier[sn] . identifier[ns] keyword[else] identifier[knod] . identifier[ns] )]= identifier[val]
keyword[return] identifier[EntryKeys] ( identifier[sel] ) | def _key_values(self, sn: 'SequenceNode') -> Union[EntryKeys, EntryValue]:
"""Parse leaf-list value or list keys."""
try:
keys = self.up_to('/') # depends on [control=['try'], data=[]]
except EndOfInput:
keys = self.remaining() # depends on [control=['except'], data=[]]
if not keys:
raise UnexpectedInput(self, 'entry value or keys') # depends on [control=['if'], data=[]]
if isinstance(sn, LeafListNode):
return EntryValue(unquote(keys)) # depends on [control=['if'], data=[]]
ks = keys.split(',')
try:
if len(ks) != len(sn.keys):
raise UnexpectedInput(self, f'exactly {len(sn.keys)} keys') # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except AttributeError:
raise BadSchemaNodeType(sn.qual_name, 'list') # depends on [control=['except'], data=[]]
sel = {}
for j in range(len(ks)):
knod = sn.get_data_child(*sn.keys[j])
val = unquote(ks[j])
sel[knod.name, None if knod.ns == sn.ns else knod.ns] = val # depends on [control=['for'], data=['j']]
return EntryKeys(sel) |
def split_by_fixed_size_onefile(infile, outfile, chunk_size, tolerance, skip_if_all_Ns=False):
'''Splits each sequence in infile into chunks of fixed size, last chunk can be up to
(chunk_size + tolerance) in length'''
seq_reader = sequences.file_reader(infile)
f_out = utils.open_file_write(outfile)
for seq in seq_reader:
for i in range(0, len(seq), chunk_size):
if i + chunk_size + tolerance >= len(seq):
end = len(seq)
else:
end = i + chunk_size
subseq = seq.subseq(i, end)
if not (skip_if_all_Ns and subseq.is_all_Ns()):
subseq.id += '.' + str(i+1) + '_' + str(end)
print(subseq, file=f_out)
if end == len(seq):
break
utils.close(f_out) | def function[split_by_fixed_size_onefile, parameter[infile, outfile, chunk_size, tolerance, skip_if_all_Ns]]:
constant[Splits each sequence in infile into chunks of fixed size, last chunk can be up to
(chunk_size + tolerance) in length]
variable[seq_reader] assign[=] call[name[sequences].file_reader, parameter[name[infile]]]
variable[f_out] assign[=] call[name[utils].open_file_write, parameter[name[outfile]]]
for taget[name[seq]] in starred[name[seq_reader]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[seq]]], name[chunk_size]]]] begin[:]
if compare[binary_operation[binary_operation[name[i] + name[chunk_size]] + name[tolerance]] greater_or_equal[>=] call[name[len], parameter[name[seq]]]] begin[:]
variable[end] assign[=] call[name[len], parameter[name[seq]]]
variable[subseq] assign[=] call[name[seq].subseq, parameter[name[i], name[end]]]
if <ast.UnaryOp object at 0x7da1afe1a5c0> begin[:]
<ast.AugAssign object at 0x7da1afe1a6e0>
call[name[print], parameter[name[subseq]]]
if compare[name[end] equal[==] call[name[len], parameter[name[seq]]]] begin[:]
break
call[name[utils].close, parameter[name[f_out]]] | keyword[def] identifier[split_by_fixed_size_onefile] ( identifier[infile] , identifier[outfile] , identifier[chunk_size] , identifier[tolerance] , identifier[skip_if_all_Ns] = keyword[False] ):
literal[string]
identifier[seq_reader] = identifier[sequences] . identifier[file_reader] ( identifier[infile] )
identifier[f_out] = identifier[utils] . identifier[open_file_write] ( identifier[outfile] )
keyword[for] identifier[seq] keyword[in] identifier[seq_reader] :
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[seq] ), identifier[chunk_size] ):
keyword[if] identifier[i] + identifier[chunk_size] + identifier[tolerance] >= identifier[len] ( identifier[seq] ):
identifier[end] = identifier[len] ( identifier[seq] )
keyword[else] :
identifier[end] = identifier[i] + identifier[chunk_size]
identifier[subseq] = identifier[seq] . identifier[subseq] ( identifier[i] , identifier[end] )
keyword[if] keyword[not] ( identifier[skip_if_all_Ns] keyword[and] identifier[subseq] . identifier[is_all_Ns] ()):
identifier[subseq] . identifier[id] += literal[string] + identifier[str] ( identifier[i] + literal[int] )+ literal[string] + identifier[str] ( identifier[end] )
identifier[print] ( identifier[subseq] , identifier[file] = identifier[f_out] )
keyword[if] identifier[end] == identifier[len] ( identifier[seq] ):
keyword[break]
identifier[utils] . identifier[close] ( identifier[f_out] ) | def split_by_fixed_size_onefile(infile, outfile, chunk_size, tolerance, skip_if_all_Ns=False):
"""Splits each sequence in infile into chunks of fixed size, last chunk can be up to
(chunk_size + tolerance) in length"""
seq_reader = sequences.file_reader(infile)
f_out = utils.open_file_write(outfile)
for seq in seq_reader:
for i in range(0, len(seq), chunk_size):
if i + chunk_size + tolerance >= len(seq):
end = len(seq) # depends on [control=['if'], data=[]]
else:
end = i + chunk_size
subseq = seq.subseq(i, end)
if not (skip_if_all_Ns and subseq.is_all_Ns()):
subseq.id += '.' + str(i + 1) + '_' + str(end)
print(subseq, file=f_out) # depends on [control=['if'], data=[]]
if end == len(seq):
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['seq']]
utils.close(f_out) |
def fix_nls(self, in_, out_):
"""Fixes submitted translations by filtering carriage returns and pairing
newlines at the begging and end of the translated string with the original
"""
if 0 == len(in_) or 0 == len(out_):
return out_
if "\r" in out_ and "\r" not in in_:
out_ = out_.replace("\r", '')
if "\n" == in_[0] and "\n" != out_[0]:
out_ = "\n" + out_
elif "\n" != in_[0] and "\n" == out_[0]:
out_ = out_.lstrip()
if 0 == len(out_):
pass
elif "\n" == in_[-1] and "\n" != out_[-1]:
out_ = out_ + "\n"
elif "\n" != in_[-1] and "\n" == out_[-1]:
out_ = out_.rstrip()
return out_ | def function[fix_nls, parameter[self, in_, out_]]:
constant[Fixes submitted translations by filtering carriage returns and pairing
newlines at the begging and end of the translated string with the original
]
if <ast.BoolOp object at 0x7da204347cd0> begin[:]
return[name[out_]]
if <ast.BoolOp object at 0x7da204344ee0> begin[:]
variable[out_] assign[=] call[name[out_].replace, parameter[constant[
], constant[]]]
if <ast.BoolOp object at 0x7da2043478b0> begin[:]
variable[out_] assign[=] binary_operation[constant[
] + name[out_]]
if compare[constant[0] equal[==] call[name[len], parameter[name[out_]]]] begin[:]
pass
return[name[out_]] | keyword[def] identifier[fix_nls] ( identifier[self] , identifier[in_] , identifier[out_] ):
literal[string]
keyword[if] literal[int] == identifier[len] ( identifier[in_] ) keyword[or] literal[int] == identifier[len] ( identifier[out_] ):
keyword[return] identifier[out_]
keyword[if] literal[string] keyword[in] identifier[out_] keyword[and] literal[string] keyword[not] keyword[in] identifier[in_] :
identifier[out_] = identifier[out_] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] literal[string] == identifier[in_] [ literal[int] ] keyword[and] literal[string] != identifier[out_] [ literal[int] ]:
identifier[out_] = literal[string] + identifier[out_]
keyword[elif] literal[string] != identifier[in_] [ literal[int] ] keyword[and] literal[string] == identifier[out_] [ literal[int] ]:
identifier[out_] = identifier[out_] . identifier[lstrip] ()
keyword[if] literal[int] == identifier[len] ( identifier[out_] ):
keyword[pass]
keyword[elif] literal[string] == identifier[in_] [- literal[int] ] keyword[and] literal[string] != identifier[out_] [- literal[int] ]:
identifier[out_] = identifier[out_] + literal[string]
keyword[elif] literal[string] != identifier[in_] [- literal[int] ] keyword[and] literal[string] == identifier[out_] [- literal[int] ]:
identifier[out_] = identifier[out_] . identifier[rstrip] ()
keyword[return] identifier[out_] | def fix_nls(self, in_, out_):
"""Fixes submitted translations by filtering carriage returns and pairing
newlines at the begging and end of the translated string with the original
"""
if 0 == len(in_) or 0 == len(out_):
return out_ # depends on [control=['if'], data=[]]
if '\r' in out_ and '\r' not in in_:
out_ = out_.replace('\r', '') # depends on [control=['if'], data=[]]
if '\n' == in_[0] and '\n' != out_[0]:
out_ = '\n' + out_ # depends on [control=['if'], data=[]]
elif '\n' != in_[0] and '\n' == out_[0]:
out_ = out_.lstrip() # depends on [control=['if'], data=[]]
if 0 == len(out_):
pass # depends on [control=['if'], data=[]]
elif '\n' == in_[-1] and '\n' != out_[-1]:
out_ = out_ + '\n' # depends on [control=['if'], data=[]]
elif '\n' != in_[-1] and '\n' == out_[-1]:
out_ = out_.rstrip() # depends on [control=['if'], data=[]]
return out_ |
def rule_for(self, target_class):
"""Decorates and adds an authorization rule
for a specified class(es) of objects.
:param target_class: a class or an iterable with classes
to associate the rule with.
"""
def decorator(rule_class):
self.add_rule(rule_class, target_class)
return rule_class
return decorator | def function[rule_for, parameter[self, target_class]]:
constant[Decorates and adds an authorization rule
for a specified class(es) of objects.
:param target_class: a class or an iterable with classes
to associate the rule with.
]
def function[decorator, parameter[rule_class]]:
call[name[self].add_rule, parameter[name[rule_class], name[target_class]]]
return[name[rule_class]]
return[name[decorator]] | keyword[def] identifier[rule_for] ( identifier[self] , identifier[target_class] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[rule_class] ):
identifier[self] . identifier[add_rule] ( identifier[rule_class] , identifier[target_class] )
keyword[return] identifier[rule_class]
keyword[return] identifier[decorator] | def rule_for(self, target_class):
"""Decorates and adds an authorization rule
for a specified class(es) of objects.
:param target_class: a class or an iterable with classes
to associate the rule with.
"""
def decorator(rule_class):
self.add_rule(rule_class, target_class)
return rule_class
return decorator |
def reqAccountUpdatesMulti(
self, account: str = '', modelCode: str = ''):
"""
It is recommended to use :meth:`.accountValues` instead.
Request account values of multiple accounts and keep updated.
This method is blocking.
Args:
account: If specified, filter for this account name.
modelCode: If specified, filter for this account model.
"""
self._run(self.reqAccountUpdatesMultiAsync(account, modelCode)) | def function[reqAccountUpdatesMulti, parameter[self, account, modelCode]]:
constant[
It is recommended to use :meth:`.accountValues` instead.
Request account values of multiple accounts and keep updated.
This method is blocking.
Args:
account: If specified, filter for this account name.
modelCode: If specified, filter for this account model.
]
call[name[self]._run, parameter[call[name[self].reqAccountUpdatesMultiAsync, parameter[name[account], name[modelCode]]]]] | keyword[def] identifier[reqAccountUpdatesMulti] (
identifier[self] , identifier[account] : identifier[str] = literal[string] , identifier[modelCode] : identifier[str] = literal[string] ):
literal[string]
identifier[self] . identifier[_run] ( identifier[self] . identifier[reqAccountUpdatesMultiAsync] ( identifier[account] , identifier[modelCode] )) | def reqAccountUpdatesMulti(self, account: str='', modelCode: str=''):
"""
It is recommended to use :meth:`.accountValues` instead.
Request account values of multiple accounts and keep updated.
This method is blocking.
Args:
account: If specified, filter for this account name.
modelCode: If specified, filter for this account model.
"""
self._run(self.reqAccountUpdatesMultiAsync(account, modelCode)) |
def issuperset(self, other):
"""Report whether this RangeSet contains another set."""
self._binary_sanity_check(other)
return set.issuperset(self, other) | def function[issuperset, parameter[self, other]]:
constant[Report whether this RangeSet contains another set.]
call[name[self]._binary_sanity_check, parameter[name[other]]]
return[call[name[set].issuperset, parameter[name[self], name[other]]]] | keyword[def] identifier[issuperset] ( identifier[self] , identifier[other] ):
literal[string]
identifier[self] . identifier[_binary_sanity_check] ( identifier[other] )
keyword[return] identifier[set] . identifier[issuperset] ( identifier[self] , identifier[other] ) | def issuperset(self, other):
"""Report whether this RangeSet contains another set."""
self._binary_sanity_check(other)
return set.issuperset(self, other) |
def sort_args(args):
"""Put flags at the end"""
args = args.copy()
flags = [i for i in args if FLAGS_RE.match(i[1])]
for i in flags:
args.remove(i)
return args + flags | def function[sort_args, parameter[args]]:
constant[Put flags at the end]
variable[args] assign[=] call[name[args].copy, parameter[]]
variable[flags] assign[=] <ast.ListComp object at 0x7da2044c0910>
for taget[name[i]] in starred[name[flags]] begin[:]
call[name[args].remove, parameter[name[i]]]
return[binary_operation[name[args] + name[flags]]] | keyword[def] identifier[sort_args] ( identifier[args] ):
literal[string]
identifier[args] = identifier[args] . identifier[copy] ()
identifier[flags] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[args] keyword[if] identifier[FLAGS_RE] . identifier[match] ( identifier[i] [ literal[int] ])]
keyword[for] identifier[i] keyword[in] identifier[flags] :
identifier[args] . identifier[remove] ( identifier[i] )
keyword[return] identifier[args] + identifier[flags] | def sort_args(args):
"""Put flags at the end"""
args = args.copy()
flags = [i for i in args if FLAGS_RE.match(i[1])]
for i in flags:
args.remove(i) # depends on [control=['for'], data=['i']]
return args + flags |
def _n(name):
"""Return valid PySB name."""
n = name.encode('ascii', errors='ignore').decode('ascii')
n = re.sub('[^A-Za-z0-9_]', '_', n)
n = re.sub(r'(^[0-9].*)', r'p\1', n)
return n | def function[_n, parameter[name]]:
constant[Return valid PySB name.]
variable[n] assign[=] call[call[name[name].encode, parameter[constant[ascii]]].decode, parameter[constant[ascii]]]
variable[n] assign[=] call[name[re].sub, parameter[constant[[^A-Za-z0-9_]], constant[_], name[n]]]
variable[n] assign[=] call[name[re].sub, parameter[constant[(^[0-9].*)], constant[p\1], name[n]]]
return[name[n]] | keyword[def] identifier[_n] ( identifier[name] ):
literal[string]
identifier[n] = identifier[name] . identifier[encode] ( literal[string] , identifier[errors] = literal[string] ). identifier[decode] ( literal[string] )
identifier[n] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[n] )
identifier[n] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[n] )
keyword[return] identifier[n] | def _n(name):
"""Return valid PySB name."""
n = name.encode('ascii', errors='ignore').decode('ascii')
n = re.sub('[^A-Za-z0-9_]', '_', n)
n = re.sub('(^[0-9].*)', 'p\\1', n)
return n |
def terms_from_dict(source):
""" Convert a dict representing a query to a string.
Args:
source -- A dict with query xpaths as keys and text or nested query dicts as values.
Returns:
A string composed from the nested query terms given.
>>> terms_from_dict({'document': {'title': "Title this is", 'text': "A long text."}})
'<document><text>A long text.</text><title>Title this is</title></document>'
>>> terms_from_dict({'document/title': "Title this is", 'document/text': "A long text."})
'<document><title>Title this is</title></document><document><text>A long text.</text></document>'
"""
parsed = ''
for xpath, text in source.items():
if hasattr(text, 'keys'):
parsed += term(terms_from_dict(text), xpath, escape=False)
else:
parsed += term(text, xpath)
return parsed | def function[terms_from_dict, parameter[source]]:
constant[ Convert a dict representing a query to a string.
Args:
source -- A dict with query xpaths as keys and text or nested query dicts as values.
Returns:
A string composed from the nested query terms given.
>>> terms_from_dict({'document': {'title': "Title this is", 'text': "A long text."}})
'<document><text>A long text.</text><title>Title this is</title></document>'
>>> terms_from_dict({'document/title': "Title this is", 'document/text': "A long text."})
'<document><title>Title this is</title></document><document><text>A long text.</text></document>'
]
variable[parsed] assign[=] constant[]
for taget[tuple[[<ast.Name object at 0x7da1b26ad990>, <ast.Name object at 0x7da1b26aef50>]]] in starred[call[name[source].items, parameter[]]] begin[:]
if call[name[hasattr], parameter[name[text], constant[keys]]] begin[:]
<ast.AugAssign object at 0x7da20c76f370>
return[name[parsed]] | keyword[def] identifier[terms_from_dict] ( identifier[source] ):
literal[string]
identifier[parsed] = literal[string]
keyword[for] identifier[xpath] , identifier[text] keyword[in] identifier[source] . identifier[items] ():
keyword[if] identifier[hasattr] ( identifier[text] , literal[string] ):
identifier[parsed] += identifier[term] ( identifier[terms_from_dict] ( identifier[text] ), identifier[xpath] , identifier[escape] = keyword[False] )
keyword[else] :
identifier[parsed] += identifier[term] ( identifier[text] , identifier[xpath] )
keyword[return] identifier[parsed] | def terms_from_dict(source):
""" Convert a dict representing a query to a string.
Args:
source -- A dict with query xpaths as keys and text or nested query dicts as values.
Returns:
A string composed from the nested query terms given.
>>> terms_from_dict({'document': {'title': "Title this is", 'text': "A long text."}})
'<document><text>A long text.</text><title>Title this is</title></document>'
>>> terms_from_dict({'document/title': "Title this is", 'document/text': "A long text."})
'<document><title>Title this is</title></document><document><text>A long text.</text></document>'
"""
parsed = ''
for (xpath, text) in source.items():
if hasattr(text, 'keys'):
parsed += term(terms_from_dict(text), xpath, escape=False) # depends on [control=['if'], data=[]]
else:
parsed += term(text, xpath) # depends on [control=['for'], data=[]]
return parsed |
def _request(self, resource, rtype, action=None, payload=None, offset=None, limit=None, requestId=None,
is_crud=False):
"""_request amqp queue publish helper
return: RequestEvent object or None for failed to publish
"""
end = self.__end
if end.is_set():
raise LinkShutdownException('Client stopped')
rng = None
if offset is not None and limit is not None:
Validation.limit_offset_check(limit, offset)
rng = "%d/%d" % (offset, limit)
with self.__requests:
if requestId is None:
requestId = self.__new_request_id()
elif requestId in self.__requests:
raise ValueError('requestId %s already in use' % requestId)
inner_msg = self.__make_innermsg(resource, rtype, requestId, action, payload, rng)
self.__requests[requestId] = ret = RequestEvent(requestId, inner_msg, is_crud=is_crud)
#
if not self.__retry_enqueue(PreparedMessage(inner_msg, requestId)):
raise LinkShutdownException('Client stopping')
return ret | def function[_request, parameter[self, resource, rtype, action, payload, offset, limit, requestId, is_crud]]:
constant[_request amqp queue publish helper
return: RequestEvent object or None for failed to publish
]
variable[end] assign[=] name[self].__end
if call[name[end].is_set, parameter[]] begin[:]
<ast.Raise object at 0x7da1b1b87ac0>
variable[rng] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b1b879a0> begin[:]
call[name[Validation].limit_offset_check, parameter[name[limit], name[offset]]]
variable[rng] assign[=] binary_operation[constant[%d/%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1b87730>, <ast.Name object at 0x7da1b1b86830>]]]
with name[self].__requests begin[:]
if compare[name[requestId] is constant[None]] begin[:]
variable[requestId] assign[=] call[name[self].__new_request_id, parameter[]]
variable[inner_msg] assign[=] call[name[self].__make_innermsg, parameter[name[resource], name[rtype], name[requestId], name[action], name[payload], name[rng]]]
call[name[self].__requests][name[requestId]] assign[=] call[name[RequestEvent], parameter[name[requestId], name[inner_msg]]]
if <ast.UnaryOp object at 0x7da1b1b35120> begin[:]
<ast.Raise object at 0x7da1b1b365f0>
return[name[ret]] | keyword[def] identifier[_request] ( identifier[self] , identifier[resource] , identifier[rtype] , identifier[action] = keyword[None] , identifier[payload] = keyword[None] , identifier[offset] = keyword[None] , identifier[limit] = keyword[None] , identifier[requestId] = keyword[None] ,
identifier[is_crud] = keyword[False] ):
literal[string]
identifier[end] = identifier[self] . identifier[__end]
keyword[if] identifier[end] . identifier[is_set] ():
keyword[raise] identifier[LinkShutdownException] ( literal[string] )
identifier[rng] = keyword[None]
keyword[if] identifier[offset] keyword[is] keyword[not] keyword[None] keyword[and] identifier[limit] keyword[is] keyword[not] keyword[None] :
identifier[Validation] . identifier[limit_offset_check] ( identifier[limit] , identifier[offset] )
identifier[rng] = literal[string] %( identifier[offset] , identifier[limit] )
keyword[with] identifier[self] . identifier[__requests] :
keyword[if] identifier[requestId] keyword[is] keyword[None] :
identifier[requestId] = identifier[self] . identifier[__new_request_id] ()
keyword[elif] identifier[requestId] keyword[in] identifier[self] . identifier[__requests] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[requestId] )
identifier[inner_msg] = identifier[self] . identifier[__make_innermsg] ( identifier[resource] , identifier[rtype] , identifier[requestId] , identifier[action] , identifier[payload] , identifier[rng] )
identifier[self] . identifier[__requests] [ identifier[requestId] ]= identifier[ret] = identifier[RequestEvent] ( identifier[requestId] , identifier[inner_msg] , identifier[is_crud] = identifier[is_crud] )
keyword[if] keyword[not] identifier[self] . identifier[__retry_enqueue] ( identifier[PreparedMessage] ( identifier[inner_msg] , identifier[requestId] )):
keyword[raise] identifier[LinkShutdownException] ( literal[string] )
keyword[return] identifier[ret] | def _request(self, resource, rtype, action=None, payload=None, offset=None, limit=None, requestId=None, is_crud=False):
"""_request amqp queue publish helper
return: RequestEvent object or None for failed to publish
"""
end = self.__end
if end.is_set():
raise LinkShutdownException('Client stopped') # depends on [control=['if'], data=[]]
rng = None
if offset is not None and limit is not None:
Validation.limit_offset_check(limit, offset)
rng = '%d/%d' % (offset, limit) # depends on [control=['if'], data=[]]
with self.__requests:
if requestId is None:
requestId = self.__new_request_id() # depends on [control=['if'], data=['requestId']]
elif requestId in self.__requests:
raise ValueError('requestId %s already in use' % requestId) # depends on [control=['if'], data=['requestId']]
inner_msg = self.__make_innermsg(resource, rtype, requestId, action, payload, rng)
self.__requests[requestId] = ret = RequestEvent(requestId, inner_msg, is_crud=is_crud) # depends on [control=['with'], data=[]]
#
if not self.__retry_enqueue(PreparedMessage(inner_msg, requestId)):
raise LinkShutdownException('Client stopping') # depends on [control=['if'], data=[]]
return ret |
def propagate_ids(cls, obj, match_id, new_id, applied_keys, backend=None):
"""
Recursively propagate an id through an object for components
matching the applied_keys. This method can only be called if
there is a tree with a matching id in Store.custom_options
"""
applied = []
def propagate(o):
if o.id == match_id or (o.__class__.__name__ == 'DynamicMap'):
setattr(o, 'id', new_id)
applied.append(o)
obj.traverse(propagate, specs=set(applied_keys) | {'DynamicMap'})
# Clean up the custom tree if it was not applied
if not new_id in Store.custom_options(backend=backend):
raise AssertionError("New option id %d does not match any "
"option trees in Store.custom_options."
% new_id)
return applied | def function[propagate_ids, parameter[cls, obj, match_id, new_id, applied_keys, backend]]:
constant[
Recursively propagate an id through an object for components
matching the applied_keys. This method can only be called if
there is a tree with a matching id in Store.custom_options
]
variable[applied] assign[=] list[[]]
def function[propagate, parameter[o]]:
if <ast.BoolOp object at 0x7da18f58f370> begin[:]
call[name[setattr], parameter[name[o], constant[id], name[new_id]]]
call[name[applied].append, parameter[name[o]]]
call[name[obj].traverse, parameter[name[propagate]]]
if <ast.UnaryOp object at 0x7da18f58e6b0> begin[:]
<ast.Raise object at 0x7da18f58f640>
return[name[applied]] | keyword[def] identifier[propagate_ids] ( identifier[cls] , identifier[obj] , identifier[match_id] , identifier[new_id] , identifier[applied_keys] , identifier[backend] = keyword[None] ):
literal[string]
identifier[applied] =[]
keyword[def] identifier[propagate] ( identifier[o] ):
keyword[if] identifier[o] . identifier[id] == identifier[match_id] keyword[or] ( identifier[o] . identifier[__class__] . identifier[__name__] == literal[string] ):
identifier[setattr] ( identifier[o] , literal[string] , identifier[new_id] )
identifier[applied] . identifier[append] ( identifier[o] )
identifier[obj] . identifier[traverse] ( identifier[propagate] , identifier[specs] = identifier[set] ( identifier[applied_keys] )|{ literal[string] })
keyword[if] keyword[not] identifier[new_id] keyword[in] identifier[Store] . identifier[custom_options] ( identifier[backend] = identifier[backend] ):
keyword[raise] identifier[AssertionError] ( literal[string]
literal[string]
% identifier[new_id] )
keyword[return] identifier[applied] | def propagate_ids(cls, obj, match_id, new_id, applied_keys, backend=None):
"""
Recursively propagate an id through an object for components
matching the applied_keys. This method can only be called if
there is a tree with a matching id in Store.custom_options
"""
applied = []
def propagate(o):
if o.id == match_id or o.__class__.__name__ == 'DynamicMap':
setattr(o, 'id', new_id)
applied.append(o) # depends on [control=['if'], data=[]]
obj.traverse(propagate, specs=set(applied_keys) | {'DynamicMap'})
# Clean up the custom tree if it was not applied
if not new_id in Store.custom_options(backend=backend):
raise AssertionError('New option id %d does not match any option trees in Store.custom_options.' % new_id) # depends on [control=['if'], data=[]]
return applied |
def render_word(self, text, tag, i):
"""Render individual word.
text (unicode): Word text.
tag (unicode): Part-of-speech tag.
i (int): Unique ID, typically word index.
RETURNS (unicode): Rendered SVG markup.
"""
y = self.offset_y + self.word_spacing
x = self.offset_x + i * self.distance
if self.direction == "rtl":
x = self.width - x
html_text = escape_html(text)
return TPL_DEP_WORDS.format(text=html_text, tag=tag, x=x, y=y) | def function[render_word, parameter[self, text, tag, i]]:
constant[Render individual word.
text (unicode): Word text.
tag (unicode): Part-of-speech tag.
i (int): Unique ID, typically word index.
RETURNS (unicode): Rendered SVG markup.
]
variable[y] assign[=] binary_operation[name[self].offset_y + name[self].word_spacing]
variable[x] assign[=] binary_operation[name[self].offset_x + binary_operation[name[i] * name[self].distance]]
if compare[name[self].direction equal[==] constant[rtl]] begin[:]
variable[x] assign[=] binary_operation[name[self].width - name[x]]
variable[html_text] assign[=] call[name[escape_html], parameter[name[text]]]
return[call[name[TPL_DEP_WORDS].format, parameter[]]] | keyword[def] identifier[render_word] ( identifier[self] , identifier[text] , identifier[tag] , identifier[i] ):
literal[string]
identifier[y] = identifier[self] . identifier[offset_y] + identifier[self] . identifier[word_spacing]
identifier[x] = identifier[self] . identifier[offset_x] + identifier[i] * identifier[self] . identifier[distance]
keyword[if] identifier[self] . identifier[direction] == literal[string] :
identifier[x] = identifier[self] . identifier[width] - identifier[x]
identifier[html_text] = identifier[escape_html] ( identifier[text] )
keyword[return] identifier[TPL_DEP_WORDS] . identifier[format] ( identifier[text] = identifier[html_text] , identifier[tag] = identifier[tag] , identifier[x] = identifier[x] , identifier[y] = identifier[y] ) | def render_word(self, text, tag, i):
"""Render individual word.
text (unicode): Word text.
tag (unicode): Part-of-speech tag.
i (int): Unique ID, typically word index.
RETURNS (unicode): Rendered SVG markup.
"""
y = self.offset_y + self.word_spacing
x = self.offset_x + i * self.distance
if self.direction == 'rtl':
x = self.width - x # depends on [control=['if'], data=[]]
html_text = escape_html(text)
return TPL_DEP_WORDS.format(text=html_text, tag=tag, x=x, y=y) |
def get_upload_path(self):
"""Returns the uploaded file path from the storage backend.
:returns: File path from the storage backend.
:rtype: :py:class:`unicode`
"""
location = self.get_storage().location
return self.cleaned_data['key_name'][len(location):] | def function[get_upload_path, parameter[self]]:
constant[Returns the uploaded file path from the storage backend.
:returns: File path from the storage backend.
:rtype: :py:class:`unicode`
]
variable[location] assign[=] call[name[self].get_storage, parameter[]].location
return[call[call[name[self].cleaned_data][constant[key_name]]][<ast.Slice object at 0x7da204565900>]] | keyword[def] identifier[get_upload_path] ( identifier[self] ):
literal[string]
identifier[location] = identifier[self] . identifier[get_storage] (). identifier[location]
keyword[return] identifier[self] . identifier[cleaned_data] [ literal[string] ][ identifier[len] ( identifier[location] ):] | def get_upload_path(self):
"""Returns the uploaded file path from the storage backend.
:returns: File path from the storage backend.
:rtype: :py:class:`unicode`
"""
location = self.get_storage().location
return self.cleaned_data['key_name'][len(location):] |
def from_user_creds(cls, username, password, url=URL_BASE):
"""
Obtain a short-lived token using a username and password, and use that
token to create an auth object.
"""
session = requests.session()
token_resp = session.post(url.rstrip('/') + '/user/login/',
data={'username': username,
'password': password})
if token_resp.status_code != 200:
error = token_resp.text
try:
error = json.loads(error)['error']
except (KeyError, ValueError):
pass
raise LuminosoLoginError(error)
return cls(token_resp.json()['result']['token']) | def function[from_user_creds, parameter[cls, username, password, url]]:
constant[
Obtain a short-lived token using a username and password, and use that
token to create an auth object.
]
variable[session] assign[=] call[name[requests].session, parameter[]]
variable[token_resp] assign[=] call[name[session].post, parameter[binary_operation[call[name[url].rstrip, parameter[constant[/]]] + constant[/user/login/]]]]
if compare[name[token_resp].status_code not_equal[!=] constant[200]] begin[:]
variable[error] assign[=] name[token_resp].text
<ast.Try object at 0x7da1b05c70d0>
<ast.Raise object at 0x7da1b05c4f10>
return[call[name[cls], parameter[call[call[call[name[token_resp].json, parameter[]]][constant[result]]][constant[token]]]]] | keyword[def] identifier[from_user_creds] ( identifier[cls] , identifier[username] , identifier[password] , identifier[url] = identifier[URL_BASE] ):
literal[string]
identifier[session] = identifier[requests] . identifier[session] ()
identifier[token_resp] = identifier[session] . identifier[post] ( identifier[url] . identifier[rstrip] ( literal[string] )+ literal[string] ,
identifier[data] ={ literal[string] : identifier[username] ,
literal[string] : identifier[password] })
keyword[if] identifier[token_resp] . identifier[status_code] != literal[int] :
identifier[error] = identifier[token_resp] . identifier[text]
keyword[try] :
identifier[error] = identifier[json] . identifier[loads] ( identifier[error] )[ literal[string] ]
keyword[except] ( identifier[KeyError] , identifier[ValueError] ):
keyword[pass]
keyword[raise] identifier[LuminosoLoginError] ( identifier[error] )
keyword[return] identifier[cls] ( identifier[token_resp] . identifier[json] ()[ literal[string] ][ literal[string] ]) | def from_user_creds(cls, username, password, url=URL_BASE):
"""
Obtain a short-lived token using a username and password, and use that
token to create an auth object.
"""
session = requests.session()
token_resp = session.post(url.rstrip('/') + '/user/login/', data={'username': username, 'password': password})
if token_resp.status_code != 200:
error = token_resp.text
try:
error = json.loads(error)['error'] # depends on [control=['try'], data=[]]
except (KeyError, ValueError):
pass # depends on [control=['except'], data=[]]
raise LuminosoLoginError(error) # depends on [control=['if'], data=[]]
return cls(token_resp.json()['result']['token']) |
def build_response(self, req, resp):
"""Builds a :class:`Response <requests.Response>` object from a urllib3
response. This should not be called from user code, and is only exposed
for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
:param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
:param resp: The urllib3 response object.
:rtype: requests.Response
"""
response = Response()
# Fallback to None if there's no status_code, for whatever reason.
response.status_code = getattr(resp, 'status', None)
# Make headers case-insensitive.
response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
# Set encoding.
response.encoding = get_encoding_from_headers(response.headers)
response.raw = resp
response.reason = response.raw.reason
if isinstance(req.url, bytes):
response.url = req.url.decode('utf-8')
else:
response.url = req.url
# Add new cookies from the server.
extract_cookies_to_jar(response.cookies, req, resp)
# Give the Response some context.
response.request = req
response.connection = self
return response | def function[build_response, parameter[self, req, resp]]:
constant[Builds a :class:`Response <requests.Response>` object from a urllib3
response. This should not be called from user code, and is only exposed
for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
:param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
:param resp: The urllib3 response object.
:rtype: requests.Response
]
variable[response] assign[=] call[name[Response], parameter[]]
name[response].status_code assign[=] call[name[getattr], parameter[name[resp], constant[status], constant[None]]]
name[response].headers assign[=] call[name[CaseInsensitiveDict], parameter[call[name[getattr], parameter[name[resp], constant[headers], dictionary[[], []]]]]]
name[response].encoding assign[=] call[name[get_encoding_from_headers], parameter[name[response].headers]]
name[response].raw assign[=] name[resp]
name[response].reason assign[=] name[response].raw.reason
if call[name[isinstance], parameter[name[req].url, name[bytes]]] begin[:]
name[response].url assign[=] call[name[req].url.decode, parameter[constant[utf-8]]]
call[name[extract_cookies_to_jar], parameter[name[response].cookies, name[req], name[resp]]]
name[response].request assign[=] name[req]
name[response].connection assign[=] name[self]
return[name[response]] | keyword[def] identifier[build_response] ( identifier[self] , identifier[req] , identifier[resp] ):
literal[string]
identifier[response] = identifier[Response] ()
identifier[response] . identifier[status_code] = identifier[getattr] ( identifier[resp] , literal[string] , keyword[None] )
identifier[response] . identifier[headers] = identifier[CaseInsensitiveDict] ( identifier[getattr] ( identifier[resp] , literal[string] ,{}))
identifier[response] . identifier[encoding] = identifier[get_encoding_from_headers] ( identifier[response] . identifier[headers] )
identifier[response] . identifier[raw] = identifier[resp]
identifier[response] . identifier[reason] = identifier[response] . identifier[raw] . identifier[reason]
keyword[if] identifier[isinstance] ( identifier[req] . identifier[url] , identifier[bytes] ):
identifier[response] . identifier[url] = identifier[req] . identifier[url] . identifier[decode] ( literal[string] )
keyword[else] :
identifier[response] . identifier[url] = identifier[req] . identifier[url]
identifier[extract_cookies_to_jar] ( identifier[response] . identifier[cookies] , identifier[req] , identifier[resp] )
identifier[response] . identifier[request] = identifier[req]
identifier[response] . identifier[connection] = identifier[self]
keyword[return] identifier[response] | def build_response(self, req, resp):
"""Builds a :class:`Response <requests.Response>` object from a urllib3
response. This should not be called from user code, and is only exposed
for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
:param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
:param resp: The urllib3 response object.
:rtype: requests.Response
"""
response = Response()
# Fallback to None if there's no status_code, for whatever reason.
response.status_code = getattr(resp, 'status', None)
# Make headers case-insensitive.
response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
# Set encoding.
response.encoding = get_encoding_from_headers(response.headers)
response.raw = resp
response.reason = response.raw.reason
if isinstance(req.url, bytes):
response.url = req.url.decode('utf-8') # depends on [control=['if'], data=[]]
else:
response.url = req.url
# Add new cookies from the server.
extract_cookies_to_jar(response.cookies, req, resp)
# Give the Response some context.
response.request = req
response.connection = self
return response |
def baseline(value):
"""ValueRef : string, vertical alignment of mark
Possible values are ``'top'``, ``'middle'``, and ``'bottom'``. Only
used if ``type`` is ``'image'`` or ``'text'``.
"""
if value.value:
_assert_is_type('shape.value', value.value, str_types)
if value.value not in PropertySet._valid_baseline:
raise ValueError(value.value + ' is not a valid baseline') | def function[baseline, parameter[value]]:
constant[ValueRef : string, vertical alignment of mark
Possible values are ``'top'``, ``'middle'``, and ``'bottom'``. Only
used if ``type`` is ``'image'`` or ``'text'``.
]
if name[value].value begin[:]
call[name[_assert_is_type], parameter[constant[shape.value], name[value].value, name[str_types]]]
if compare[name[value].value <ast.NotIn object at 0x7da2590d7190> name[PropertySet]._valid_baseline] begin[:]
<ast.Raise object at 0x7da18f58c070> | keyword[def] identifier[baseline] ( identifier[value] ):
literal[string]
keyword[if] identifier[value] . identifier[value] :
identifier[_assert_is_type] ( literal[string] , identifier[value] . identifier[value] , identifier[str_types] )
keyword[if] identifier[value] . identifier[value] keyword[not] keyword[in] identifier[PropertySet] . identifier[_valid_baseline] :
keyword[raise] identifier[ValueError] ( identifier[value] . identifier[value] + literal[string] ) | def baseline(value):
"""ValueRef : string, vertical alignment of mark
Possible values are ``'top'``, ``'middle'``, and ``'bottom'``. Only
used if ``type`` is ``'image'`` or ``'text'``.
"""
if value.value:
_assert_is_type('shape.value', value.value, str_types)
if value.value not in PropertySet._valid_baseline:
raise ValueError(value.value + ' is not a valid baseline') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def maximumORFLength(self, openORFs=True):
"""
Return the length of the longest (possibly partial) ORF in a translated
read. The ORF may originate or terminate outside the sequence, which is
why the length is just a lower bound.
"""
return max(len(orf) for orf in self.ORFs(openORFs)) | def function[maximumORFLength, parameter[self, openORFs]]:
constant[
Return the length of the longest (possibly partial) ORF in a translated
read. The ORF may originate or terminate outside the sequence, which is
why the length is just a lower bound.
]
return[call[name[max], parameter[<ast.GeneratorExp object at 0x7da18f722290>]]] | keyword[def] identifier[maximumORFLength] ( identifier[self] , identifier[openORFs] = keyword[True] ):
literal[string]
keyword[return] identifier[max] ( identifier[len] ( identifier[orf] ) keyword[for] identifier[orf] keyword[in] identifier[self] . identifier[ORFs] ( identifier[openORFs] )) | def maximumORFLength(self, openORFs=True):
"""
Return the length of the longest (possibly partial) ORF in a translated
read. The ORF may originate or terminate outside the sequence, which is
why the length is just a lower bound.
"""
return max((len(orf) for orf in self.ORFs(openORFs))) |
def getSlicesForText(self, body, getFingerprint=None, startIndex=0, maxResults=10):
"""Get a list of slices of the text
Args:
body, str: The text to be evaluated (required)
getFingerprint, bool: Configure if the fingerprint should be returned as part of the results (optional)
startIndex, int: The start-index for pagination (optional)
maxResults, int: Max results per page (optional)
Returns:
list of Text
Raises:
CorticalioException: if the request was not successful
"""
return self._text.getSlicesForText(self._retina, body, getFingerprint, startIndex, maxResults) | def function[getSlicesForText, parameter[self, body, getFingerprint, startIndex, maxResults]]:
constant[Get a list of slices of the text
Args:
body, str: The text to be evaluated (required)
getFingerprint, bool: Configure if the fingerprint should be returned as part of the results (optional)
startIndex, int: The start-index for pagination (optional)
maxResults, int: Max results per page (optional)
Returns:
list of Text
Raises:
CorticalioException: if the request was not successful
]
return[call[name[self]._text.getSlicesForText, parameter[name[self]._retina, name[body], name[getFingerprint], name[startIndex], name[maxResults]]]] | keyword[def] identifier[getSlicesForText] ( identifier[self] , identifier[body] , identifier[getFingerprint] = keyword[None] , identifier[startIndex] = literal[int] , identifier[maxResults] = literal[int] ):
literal[string]
keyword[return] identifier[self] . identifier[_text] . identifier[getSlicesForText] ( identifier[self] . identifier[_retina] , identifier[body] , identifier[getFingerprint] , identifier[startIndex] , identifier[maxResults] ) | def getSlicesForText(self, body, getFingerprint=None, startIndex=0, maxResults=10):
"""Get a list of slices of the text
Args:
body, str: The text to be evaluated (required)
getFingerprint, bool: Configure if the fingerprint should be returned as part of the results (optional)
startIndex, int: The start-index for pagination (optional)
maxResults, int: Max results per page (optional)
Returns:
list of Text
Raises:
CorticalioException: if the request was not successful
"""
return self._text.getSlicesForText(self._retina, body, getFingerprint, startIndex, maxResults) |
def _finalize_azure_file(self, ud, metadata):
# type: (Uploader, blobxfer.models.upload.Descriptor, dict) -> None
"""Finalize Azure File
:param Uploader self: this
:param blobxfer.models.upload.Descriptor ud: upload descriptor
:param dict metadata: metadata dict
"""
# set md5 file property if required
if ud.requires_non_encrypted_md5_put:
digest = blobxfer.util.base64_encode_as_string(ud.md5.digest())
else:
digest = None
if digest is not None or ud.entity.cache_control is not None:
blobxfer.operations.azure.file.set_file_properties(
ud.entity, digest)
if blobxfer.util.is_not_empty(ud.entity.replica_targets):
for ase in ud.entity.replica_targets:
blobxfer.operations.azure.file.set_file_properties(
ase, digest)
# set file metadata if needed
if blobxfer.util.is_not_empty(metadata):
blobxfer.operations.azure.file.set_file_metadata(
ud.entity, metadata)
if blobxfer.util.is_not_empty(ud.entity.replica_targets):
for ase in ud.entity.replica_targets:
blobxfer.operations.azure.file.set_file_metadata(
ase, metadata) | def function[_finalize_azure_file, parameter[self, ud, metadata]]:
constant[Finalize Azure File
:param Uploader self: this
:param blobxfer.models.upload.Descriptor ud: upload descriptor
:param dict metadata: metadata dict
]
if name[ud].requires_non_encrypted_md5_put begin[:]
variable[digest] assign[=] call[name[blobxfer].util.base64_encode_as_string, parameter[call[name[ud].md5.digest, parameter[]]]]
if <ast.BoolOp object at 0x7da18f58ded0> begin[:]
call[name[blobxfer].operations.azure.file.set_file_properties, parameter[name[ud].entity, name[digest]]]
if call[name[blobxfer].util.is_not_empty, parameter[name[ud].entity.replica_targets]] begin[:]
for taget[name[ase]] in starred[name[ud].entity.replica_targets] begin[:]
call[name[blobxfer].operations.azure.file.set_file_properties, parameter[name[ase], name[digest]]]
if call[name[blobxfer].util.is_not_empty, parameter[name[metadata]]] begin[:]
call[name[blobxfer].operations.azure.file.set_file_metadata, parameter[name[ud].entity, name[metadata]]]
if call[name[blobxfer].util.is_not_empty, parameter[name[ud].entity.replica_targets]] begin[:]
for taget[name[ase]] in starred[name[ud].entity.replica_targets] begin[:]
call[name[blobxfer].operations.azure.file.set_file_metadata, parameter[name[ase], name[metadata]]] | keyword[def] identifier[_finalize_azure_file] ( identifier[self] , identifier[ud] , identifier[metadata] ):
literal[string]
keyword[if] identifier[ud] . identifier[requires_non_encrypted_md5_put] :
identifier[digest] = identifier[blobxfer] . identifier[util] . identifier[base64_encode_as_string] ( identifier[ud] . identifier[md5] . identifier[digest] ())
keyword[else] :
identifier[digest] = keyword[None]
keyword[if] identifier[digest] keyword[is] keyword[not] keyword[None] keyword[or] identifier[ud] . identifier[entity] . identifier[cache_control] keyword[is] keyword[not] keyword[None] :
identifier[blobxfer] . identifier[operations] . identifier[azure] . identifier[file] . identifier[set_file_properties] (
identifier[ud] . identifier[entity] , identifier[digest] )
keyword[if] identifier[blobxfer] . identifier[util] . identifier[is_not_empty] ( identifier[ud] . identifier[entity] . identifier[replica_targets] ):
keyword[for] identifier[ase] keyword[in] identifier[ud] . identifier[entity] . identifier[replica_targets] :
identifier[blobxfer] . identifier[operations] . identifier[azure] . identifier[file] . identifier[set_file_properties] (
identifier[ase] , identifier[digest] )
keyword[if] identifier[blobxfer] . identifier[util] . identifier[is_not_empty] ( identifier[metadata] ):
identifier[blobxfer] . identifier[operations] . identifier[azure] . identifier[file] . identifier[set_file_metadata] (
identifier[ud] . identifier[entity] , identifier[metadata] )
keyword[if] identifier[blobxfer] . identifier[util] . identifier[is_not_empty] ( identifier[ud] . identifier[entity] . identifier[replica_targets] ):
keyword[for] identifier[ase] keyword[in] identifier[ud] . identifier[entity] . identifier[replica_targets] :
identifier[blobxfer] . identifier[operations] . identifier[azure] . identifier[file] . identifier[set_file_metadata] (
identifier[ase] , identifier[metadata] ) | def _finalize_azure_file(self, ud, metadata):
# type: (Uploader, blobxfer.models.upload.Descriptor, dict) -> None
'Finalize Azure File\n :param Uploader self: this\n :param blobxfer.models.upload.Descriptor ud: upload descriptor\n :param dict metadata: metadata dict\n '
# set md5 file property if required
if ud.requires_non_encrypted_md5_put:
digest = blobxfer.util.base64_encode_as_string(ud.md5.digest()) # depends on [control=['if'], data=[]]
else:
digest = None
if digest is not None or ud.entity.cache_control is not None:
blobxfer.operations.azure.file.set_file_properties(ud.entity, digest)
if blobxfer.util.is_not_empty(ud.entity.replica_targets):
for ase in ud.entity.replica_targets:
blobxfer.operations.azure.file.set_file_properties(ase, digest) # depends on [control=['for'], data=['ase']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# set file metadata if needed
if blobxfer.util.is_not_empty(metadata):
blobxfer.operations.azure.file.set_file_metadata(ud.entity, metadata)
if blobxfer.util.is_not_empty(ud.entity.replica_targets):
for ase in ud.entity.replica_targets:
blobxfer.operations.azure.file.set_file_metadata(ase, metadata) # depends on [control=['for'], data=['ase']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def from_filename(self, filename):
'''
Build an IntentSchema from a file path
creates a new intent schema if the file does not exist, throws an error if the file
exists but cannot be loaded as a JSON
'''
if os.path.exists(filename):
with open(filename) as fp:
return IntentSchema(json.load(fp, object_pairs_hook=OrderedDict))
else:
print ('File does not exist')
return IntentSchema() | def function[from_filename, parameter[self, filename]]:
constant[
Build an IntentSchema from a file path
creates a new intent schema if the file does not exist, throws an error if the file
exists but cannot be loaded as a JSON
]
if call[name[os].path.exists, parameter[name[filename]]] begin[:]
with call[name[open], parameter[name[filename]]] begin[:]
return[call[name[IntentSchema], parameter[call[name[json].load, parameter[name[fp]]]]]] | keyword[def] identifier[from_filename] ( identifier[self] , identifier[filename] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[filename] ):
keyword[with] identifier[open] ( identifier[filename] ) keyword[as] identifier[fp] :
keyword[return] identifier[IntentSchema] ( identifier[json] . identifier[load] ( identifier[fp] , identifier[object_pairs_hook] = identifier[OrderedDict] ))
keyword[else] :
identifier[print] ( literal[string] )
keyword[return] identifier[IntentSchema] () | def from_filename(self, filename):
"""
Build an IntentSchema from a file path
creates a new intent schema if the file does not exist, throws an error if the file
exists but cannot be loaded as a JSON
"""
if os.path.exists(filename):
with open(filename) as fp:
return IntentSchema(json.load(fp, object_pairs_hook=OrderedDict)) # depends on [control=['with'], data=['fp']] # depends on [control=['if'], data=[]]
else:
print('File does not exist')
return IntentSchema() |
def create_object(self, obj_type, payload, return_fields=None):
"""Create an Infoblox object of type 'obj_type'
Args:
obj_type (str): Infoblox object type,
e.g. 'network', 'range', etc.
payload (dict): Payload with data to send
return_fields (list): List of fields to be returned
Returns:
The object reference of the newly create object
Raises:
InfobloxException
"""
self._validate_obj_type_or_die(obj_type)
query_params = self._build_query_params(return_fields=return_fields)
url = self._construct_url(obj_type, query_params)
opts = self._get_request_options(data=payload)
self._log_request('post', url, opts)
if(self.session.cookies):
# the first 'get' or 'post' action will generate a cookie
# after that, we don't need to re-authenticate
self.session.auth = None
r = self.session.post(url, **opts)
self._validate_authorized(r)
if r.status_code != requests.codes.CREATED:
response = utils.safe_json_load(r.content)
already_assigned = 'is assigned to another network view'
if response and already_assigned in response.get('text'):
exception = ib_ex.InfobloxMemberAlreadyAssigned
else:
exception = ib_ex.InfobloxCannotCreateObject
raise exception(
response=response,
obj_type=obj_type,
content=r.content,
args=payload,
code=r.status_code)
return self._parse_reply(r) | def function[create_object, parameter[self, obj_type, payload, return_fields]]:
constant[Create an Infoblox object of type 'obj_type'
Args:
obj_type (str): Infoblox object type,
e.g. 'network', 'range', etc.
payload (dict): Payload with data to send
return_fields (list): List of fields to be returned
Returns:
The object reference of the newly create object
Raises:
InfobloxException
]
call[name[self]._validate_obj_type_or_die, parameter[name[obj_type]]]
variable[query_params] assign[=] call[name[self]._build_query_params, parameter[]]
variable[url] assign[=] call[name[self]._construct_url, parameter[name[obj_type], name[query_params]]]
variable[opts] assign[=] call[name[self]._get_request_options, parameter[]]
call[name[self]._log_request, parameter[constant[post], name[url], name[opts]]]
if name[self].session.cookies begin[:]
name[self].session.auth assign[=] constant[None]
variable[r] assign[=] call[name[self].session.post, parameter[name[url]]]
call[name[self]._validate_authorized, parameter[name[r]]]
if compare[name[r].status_code not_equal[!=] name[requests].codes.CREATED] begin[:]
variable[response] assign[=] call[name[utils].safe_json_load, parameter[name[r].content]]
variable[already_assigned] assign[=] constant[is assigned to another network view]
if <ast.BoolOp object at 0x7da18bc705e0> begin[:]
variable[exception] assign[=] name[ib_ex].InfobloxMemberAlreadyAssigned
<ast.Raise object at 0x7da18bc704c0>
return[call[name[self]._parse_reply, parameter[name[r]]]] | keyword[def] identifier[create_object] ( identifier[self] , identifier[obj_type] , identifier[payload] , identifier[return_fields] = keyword[None] ):
literal[string]
identifier[self] . identifier[_validate_obj_type_or_die] ( identifier[obj_type] )
identifier[query_params] = identifier[self] . identifier[_build_query_params] ( identifier[return_fields] = identifier[return_fields] )
identifier[url] = identifier[self] . identifier[_construct_url] ( identifier[obj_type] , identifier[query_params] )
identifier[opts] = identifier[self] . identifier[_get_request_options] ( identifier[data] = identifier[payload] )
identifier[self] . identifier[_log_request] ( literal[string] , identifier[url] , identifier[opts] )
keyword[if] ( identifier[self] . identifier[session] . identifier[cookies] ):
identifier[self] . identifier[session] . identifier[auth] = keyword[None]
identifier[r] = identifier[self] . identifier[session] . identifier[post] ( identifier[url] ,** identifier[opts] )
identifier[self] . identifier[_validate_authorized] ( identifier[r] )
keyword[if] identifier[r] . identifier[status_code] != identifier[requests] . identifier[codes] . identifier[CREATED] :
identifier[response] = identifier[utils] . identifier[safe_json_load] ( identifier[r] . identifier[content] )
identifier[already_assigned] = literal[string]
keyword[if] identifier[response] keyword[and] identifier[already_assigned] keyword[in] identifier[response] . identifier[get] ( literal[string] ):
identifier[exception] = identifier[ib_ex] . identifier[InfobloxMemberAlreadyAssigned]
keyword[else] :
identifier[exception] = identifier[ib_ex] . identifier[InfobloxCannotCreateObject]
keyword[raise] identifier[exception] (
identifier[response] = identifier[response] ,
identifier[obj_type] = identifier[obj_type] ,
identifier[content] = identifier[r] . identifier[content] ,
identifier[args] = identifier[payload] ,
identifier[code] = identifier[r] . identifier[status_code] )
keyword[return] identifier[self] . identifier[_parse_reply] ( identifier[r] ) | def create_object(self, obj_type, payload, return_fields=None):
"""Create an Infoblox object of type 'obj_type'
Args:
obj_type (str): Infoblox object type,
e.g. 'network', 'range', etc.
payload (dict): Payload with data to send
return_fields (list): List of fields to be returned
Returns:
The object reference of the newly create object
Raises:
InfobloxException
"""
self._validate_obj_type_or_die(obj_type)
query_params = self._build_query_params(return_fields=return_fields)
url = self._construct_url(obj_type, query_params)
opts = self._get_request_options(data=payload)
self._log_request('post', url, opts)
if self.session.cookies:
# the first 'get' or 'post' action will generate a cookie
# after that, we don't need to re-authenticate
self.session.auth = None # depends on [control=['if'], data=[]]
r = self.session.post(url, **opts)
self._validate_authorized(r)
if r.status_code != requests.codes.CREATED:
response = utils.safe_json_load(r.content)
already_assigned = 'is assigned to another network view'
if response and already_assigned in response.get('text'):
exception = ib_ex.InfobloxMemberAlreadyAssigned # depends on [control=['if'], data=[]]
else:
exception = ib_ex.InfobloxCannotCreateObject
raise exception(response=response, obj_type=obj_type, content=r.content, args=payload, code=r.status_code) # depends on [control=['if'], data=[]]
return self._parse_reply(r) |
def update(self, other, **kwargs):
"""
A dict-like update for Struct attributes.
"""
if other is None: return
if not isinstance(other, dict):
other = other.to_dict()
self.__dict__.update(other, **kwargs) | def function[update, parameter[self, other]]:
constant[
A dict-like update for Struct attributes.
]
if compare[name[other] is constant[None]] begin[:]
return[None]
if <ast.UnaryOp object at 0x7da2041da6b0> begin[:]
variable[other] assign[=] call[name[other].to_dict, parameter[]]
call[name[self].__dict__.update, parameter[name[other]]] | keyword[def] identifier[update] ( identifier[self] , identifier[other] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[other] keyword[is] keyword[None] : keyword[return]
keyword[if] keyword[not] identifier[isinstance] ( identifier[other] , identifier[dict] ):
identifier[other] = identifier[other] . identifier[to_dict] ()
identifier[self] . identifier[__dict__] . identifier[update] ( identifier[other] ,** identifier[kwargs] ) | def update(self, other, **kwargs):
"""
A dict-like update for Struct attributes.
"""
if other is None:
return # depends on [control=['if'], data=[]]
if not isinstance(other, dict):
other = other.to_dict() # depends on [control=['if'], data=[]]
self.__dict__.update(other, **kwargs) |
def make_proxy_method(cls, name):
"""Creates a proxy function that can be used by Flasks routing. The
proxy instantiates the Mocha subclass and calls the appropriate
method.
:param name: the name of the method to create a proxy for
"""
i = cls()
view = getattr(i, name)
for decorator in cls.decorators:
view = decorator(view)
@functools.wraps(view)
def proxy(**forgettable_view_args):
# Always use the global request object's view_args, because they
# can be modified by intervening function before an endpoint or
# wrapper gets called. This matches Flask's behavior.
del forgettable_view_args
if hasattr(i, "before_request"):
response = i.before_request(name, **request.view_args)
if response is not None:
return response
before_view_name = "before_" + name
if hasattr(i, before_view_name):
before_view = getattr(i, before_view_name)
response = before_view(**request.view_args)
if response is not None:
return response
response = view(**request.view_args)
# You can also return a dict or None, it will pass it to render
if isinstance(response, dict) or response is None:
response = response or {}
if hasattr(i, "_renderer"):
response = i._renderer(response)
else:
_template = build_endpoint_route_name(cls, view.__name__)
_template = utils.list_replace([".", ":"], "/", _template)
_template = "%s.%s" % (_template, cls.template_markup)
# Set the title from the nav title, if not set
_meta_title = getattr(g, "__META__", {}).get("title")
if (not _meta_title or _meta_title == "") and get_view_attr(view, "title"):
page_attr(title=get_view_attr(view, "title"))
response.setdefault("_template", _template)
response = i.render(**response)
if not isinstance(response, Response):
response = make_response(response)
for ext in cls._ext:
response = ext(response)
after_view_name = "after_" + name
if hasattr(i, after_view_name):
after_view = getattr(i, after_view_name)
response = after_view(response)
if hasattr(i, "after_request"):
response = i.after_request(name, response)
return response
return proxy | def function[make_proxy_method, parameter[cls, name]]:
constant[Creates a proxy function that can be used by Flasks routing. The
proxy instantiates the Mocha subclass and calls the appropriate
method.
:param name: the name of the method to create a proxy for
]
variable[i] assign[=] call[name[cls], parameter[]]
variable[view] assign[=] call[name[getattr], parameter[name[i], name[name]]]
for taget[name[decorator]] in starred[name[cls].decorators] begin[:]
variable[view] assign[=] call[name[decorator], parameter[name[view]]]
def function[proxy, parameter[]]:
<ast.Delete object at 0x7da207f02080>
if call[name[hasattr], parameter[name[i], constant[before_request]]] begin[:]
variable[response] assign[=] call[name[i].before_request, parameter[name[name]]]
if compare[name[response] is_not constant[None]] begin[:]
return[name[response]]
variable[before_view_name] assign[=] binary_operation[constant[before_] + name[name]]
if call[name[hasattr], parameter[name[i], name[before_view_name]]] begin[:]
variable[before_view] assign[=] call[name[getattr], parameter[name[i], name[before_view_name]]]
variable[response] assign[=] call[name[before_view], parameter[]]
if compare[name[response] is_not constant[None]] begin[:]
return[name[response]]
variable[response] assign[=] call[name[view], parameter[]]
if <ast.BoolOp object at 0x7da207f02a40> begin[:]
variable[response] assign[=] <ast.BoolOp object at 0x7da207f023b0>
if call[name[hasattr], parameter[name[i], constant[_renderer]]] begin[:]
variable[response] assign[=] call[name[i]._renderer, parameter[name[response]]]
if <ast.UnaryOp object at 0x7da20c6e43a0> begin[:]
variable[response] assign[=] call[name[make_response], parameter[name[response]]]
for taget[name[ext]] in starred[name[cls]._ext] begin[:]
variable[response] assign[=] call[name[ext], parameter[name[response]]]
variable[after_view_name] assign[=] binary_operation[constant[after_] + name[name]]
if call[name[hasattr], parameter[name[i], name[after_view_name]]] begin[:]
variable[after_view] assign[=] call[name[getattr], parameter[name[i], name[after_view_name]]]
variable[response] assign[=] call[name[after_view], parameter[name[response]]]
if call[name[hasattr], parameter[name[i], constant[after_request]]] begin[:]
variable[response] assign[=] call[name[i].after_request, parameter[name[name], name[response]]]
return[name[response]]
return[name[proxy]] | keyword[def] identifier[make_proxy_method] ( identifier[cls] , identifier[name] ):
literal[string]
identifier[i] = identifier[cls] ()
identifier[view] = identifier[getattr] ( identifier[i] , identifier[name] )
keyword[for] identifier[decorator] keyword[in] identifier[cls] . identifier[decorators] :
identifier[view] = identifier[decorator] ( identifier[view] )
@ identifier[functools] . identifier[wraps] ( identifier[view] )
keyword[def] identifier[proxy] (** identifier[forgettable_view_args] ):
keyword[del] identifier[forgettable_view_args]
keyword[if] identifier[hasattr] ( identifier[i] , literal[string] ):
identifier[response] = identifier[i] . identifier[before_request] ( identifier[name] ,** identifier[request] . identifier[view_args] )
keyword[if] identifier[response] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[response]
identifier[before_view_name] = literal[string] + identifier[name]
keyword[if] identifier[hasattr] ( identifier[i] , identifier[before_view_name] ):
identifier[before_view] = identifier[getattr] ( identifier[i] , identifier[before_view_name] )
identifier[response] = identifier[before_view] (** identifier[request] . identifier[view_args] )
keyword[if] identifier[response] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[response]
identifier[response] = identifier[view] (** identifier[request] . identifier[view_args] )
keyword[if] identifier[isinstance] ( identifier[response] , identifier[dict] ) keyword[or] identifier[response] keyword[is] keyword[None] :
identifier[response] = identifier[response] keyword[or] {}
keyword[if] identifier[hasattr] ( identifier[i] , literal[string] ):
identifier[response] = identifier[i] . identifier[_renderer] ( identifier[response] )
keyword[else] :
identifier[_template] = identifier[build_endpoint_route_name] ( identifier[cls] , identifier[view] . identifier[__name__] )
identifier[_template] = identifier[utils] . identifier[list_replace] ([ literal[string] , literal[string] ], literal[string] , identifier[_template] )
identifier[_template] = literal[string] %( identifier[_template] , identifier[cls] . identifier[template_markup] )
identifier[_meta_title] = identifier[getattr] ( identifier[g] , literal[string] ,{}). identifier[get] ( literal[string] )
keyword[if] ( keyword[not] identifier[_meta_title] keyword[or] identifier[_meta_title] == literal[string] ) keyword[and] identifier[get_view_attr] ( identifier[view] , literal[string] ):
identifier[page_attr] ( identifier[title] = identifier[get_view_attr] ( identifier[view] , literal[string] ))
identifier[response] . identifier[setdefault] ( literal[string] , identifier[_template] )
identifier[response] = identifier[i] . identifier[render] (** identifier[response] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[response] , identifier[Response] ):
identifier[response] = identifier[make_response] ( identifier[response] )
keyword[for] identifier[ext] keyword[in] identifier[cls] . identifier[_ext] :
identifier[response] = identifier[ext] ( identifier[response] )
identifier[after_view_name] = literal[string] + identifier[name]
keyword[if] identifier[hasattr] ( identifier[i] , identifier[after_view_name] ):
identifier[after_view] = identifier[getattr] ( identifier[i] , identifier[after_view_name] )
identifier[response] = identifier[after_view] ( identifier[response] )
keyword[if] identifier[hasattr] ( identifier[i] , literal[string] ):
identifier[response] = identifier[i] . identifier[after_request] ( identifier[name] , identifier[response] )
keyword[return] identifier[response]
keyword[return] identifier[proxy] | def make_proxy_method(cls, name):
"""Creates a proxy function that can be used by Flasks routing. The
proxy instantiates the Mocha subclass and calls the appropriate
method.
:param name: the name of the method to create a proxy for
"""
i = cls()
view = getattr(i, name)
for decorator in cls.decorators:
view = decorator(view) # depends on [control=['for'], data=['decorator']]
@functools.wraps(view)
def proxy(**forgettable_view_args):
# Always use the global request object's view_args, because they
# can be modified by intervening function before an endpoint or
# wrapper gets called. This matches Flask's behavior.
del forgettable_view_args
if hasattr(i, 'before_request'):
response = i.before_request(name, **request.view_args)
if response is not None:
return response # depends on [control=['if'], data=['response']] # depends on [control=['if'], data=[]]
before_view_name = 'before_' + name
if hasattr(i, before_view_name):
before_view = getattr(i, before_view_name)
response = before_view(**request.view_args)
if response is not None:
return response # depends on [control=['if'], data=['response']] # depends on [control=['if'], data=[]]
response = view(**request.view_args)
# You can also return a dict or None, it will pass it to render
if isinstance(response, dict) or response is None:
response = response or {}
if hasattr(i, '_renderer'):
response = i._renderer(response) # depends on [control=['if'], data=[]]
else:
_template = build_endpoint_route_name(cls, view.__name__)
_template = utils.list_replace(['.', ':'], '/', _template)
_template = '%s.%s' % (_template, cls.template_markup)
# Set the title from the nav title, if not set
_meta_title = getattr(g, '__META__', {}).get('title')
if (not _meta_title or _meta_title == '') and get_view_attr(view, 'title'):
page_attr(title=get_view_attr(view, 'title')) # depends on [control=['if'], data=[]]
response.setdefault('_template', _template)
response = i.render(**response) # depends on [control=['if'], data=[]]
if not isinstance(response, Response):
response = make_response(response) # depends on [control=['if'], data=[]]
for ext in cls._ext:
response = ext(response) # depends on [control=['for'], data=['ext']]
after_view_name = 'after_' + name
if hasattr(i, after_view_name):
after_view = getattr(i, after_view_name)
response = after_view(response) # depends on [control=['if'], data=[]]
if hasattr(i, 'after_request'):
response = i.after_request(name, response) # depends on [control=['if'], data=[]]
return response
return proxy |
def result(self):
"""
Construye la expresion
"""
field = re.sub(REGEX_CLEANER, '', self.field)
try:
value = float(self.value)
except TypeError:
value = "(%s)" % ( "', '".join(self.value) )
except ValueError:
value = str(self.value) \
.replace("\\", r"\\") \
.replace('"', r'\"') \
.replace("'", r"\'")
value = "'%s'" % value
res = "%s %s %s" % (field, self.operator, value)
if self.conjunction:
res = "%s %s" % (self.conjunction, res)
return res | def function[result, parameter[self]]:
constant[
Construye la expresion
]
variable[field] assign[=] call[name[re].sub, parameter[name[REGEX_CLEANER], constant[], name[self].field]]
<ast.Try object at 0x7da18f09ddb0>
variable[res] assign[=] binary_operation[constant[%s %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2054a58a0>, <ast.Attribute object at 0x7da2054a6cb0>, <ast.Name object at 0x7da2054a4160>]]]
if name[self].conjunction begin[:]
variable[res] assign[=] binary_operation[constant[%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da2054a42b0>, <ast.Name object at 0x7da2054a4e50>]]]
return[name[res]] | keyword[def] identifier[result] ( identifier[self] ):
literal[string]
identifier[field] = identifier[re] . identifier[sub] ( identifier[REGEX_CLEANER] , literal[string] , identifier[self] . identifier[field] )
keyword[try] :
identifier[value] = identifier[float] ( identifier[self] . identifier[value] )
keyword[except] identifier[TypeError] :
identifier[value] = literal[string] %( literal[string] . identifier[join] ( identifier[self] . identifier[value] ))
keyword[except] identifier[ValueError] :
identifier[value] = identifier[str] ( identifier[self] . identifier[value] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[value] = literal[string] % identifier[value]
identifier[res] = literal[string] %( identifier[field] , identifier[self] . identifier[operator] , identifier[value] )
keyword[if] identifier[self] . identifier[conjunction] :
identifier[res] = literal[string] %( identifier[self] . identifier[conjunction] , identifier[res] )
keyword[return] identifier[res] | def result(self):
"""
Construye la expresion
"""
field = re.sub(REGEX_CLEANER, '', self.field)
try:
value = float(self.value) # depends on [control=['try'], data=[]]
except TypeError:
value = '(%s)' % "', '".join(self.value) # depends on [control=['except'], data=[]]
except ValueError:
value = str(self.value).replace('\\', '\\\\').replace('"', '\\"').replace("'", "\\'")
value = "'%s'" % value # depends on [control=['except'], data=[]]
res = '%s %s %s' % (field, self.operator, value)
if self.conjunction:
res = '%s %s' % (self.conjunction, res) # depends on [control=['if'], data=[]]
return res |
def make_html_para( self, words ):
""" convert words of a paragraph into tagged HTML text, handle xrefs """
line = ""
if words:
line = self.make_html_word( words[0] )
for word in words[1:]:
line = line + " " + self.make_html_word( word )
# convert `...' quotations into real left and right single quotes
line = re.sub( r"(^|\W)`(.*?)'(\W|$)", \
r'\1‘\2’\3', \
line )
# convert tilde into non-breakable space
line = string.replace( line, "~", " " )
return para_header + line + para_footer | def function[make_html_para, parameter[self, words]]:
constant[ convert words of a paragraph into tagged HTML text, handle xrefs ]
variable[line] assign[=] constant[]
if name[words] begin[:]
variable[line] assign[=] call[name[self].make_html_word, parameter[call[name[words]][constant[0]]]]
for taget[name[word]] in starred[call[name[words]][<ast.Slice object at 0x7da20e956da0>]] begin[:]
variable[line] assign[=] binary_operation[binary_operation[name[line] + constant[ ]] + call[name[self].make_html_word, parameter[name[word]]]]
variable[line] assign[=] call[name[re].sub, parameter[constant[(^|\W)`(.*?)'(\W|$)], constant[\1‘\2’\3], name[line]]]
variable[line] assign[=] call[name[string].replace, parameter[name[line], constant[~], constant[ ]]]
return[binary_operation[binary_operation[name[para_header] + name[line]] + name[para_footer]]] | keyword[def] identifier[make_html_para] ( identifier[self] , identifier[words] ):
literal[string]
identifier[line] = literal[string]
keyword[if] identifier[words] :
identifier[line] = identifier[self] . identifier[make_html_word] ( identifier[words] [ literal[int] ])
keyword[for] identifier[word] keyword[in] identifier[words] [ literal[int] :]:
identifier[line] = identifier[line] + literal[string] + identifier[self] . identifier[make_html_word] ( identifier[word] )
identifier[line] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[line] )
identifier[line] = identifier[string] . identifier[replace] ( identifier[line] , literal[string] , literal[string] )
keyword[return] identifier[para_header] + identifier[line] + identifier[para_footer] | def make_html_para(self, words):
""" convert words of a paragraph into tagged HTML text, handle xrefs """
line = ''
if words:
line = self.make_html_word(words[0])
for word in words[1:]:
line = line + ' ' + self.make_html_word(word) # depends on [control=['for'], data=['word']]
# convert `...' quotations into real left and right single quotes
line = re.sub("(^|\\W)`(.*?)'(\\W|$)", '\\1‘\\2’\\3', line)
# convert tilde into non-breakable space
line = string.replace(line, '~', ' ') # depends on [control=['if'], data=[]]
return para_header + line + para_footer |
def parse_argv(self, argv=None):
# type: (Optional[tuple]) -> dict
"""
Parses arguments for the command.
:param argv:
Arguments to pass to the argument parser.
If ``None``, defaults to ``sys.argv[1:]``.
"""
arguments = vars(self.create_argument_parser().parse_args(argv))
seed = None
if self.requires_seed:
seed_filepath = arguments.pop('seed_file')
seed = (
self.seed_from_filepath(seed_filepath)
if seed_filepath
else self.prompt_for_seed()
)
arguments['api'] = Iota(
adapter=arguments.pop('uri'),
seed=seed,
testnet=arguments.pop('testnet'),
)
return arguments | def function[parse_argv, parameter[self, argv]]:
constant[
Parses arguments for the command.
:param argv:
Arguments to pass to the argument parser.
If ``None``, defaults to ``sys.argv[1:]``.
]
variable[arguments] assign[=] call[name[vars], parameter[call[call[name[self].create_argument_parser, parameter[]].parse_args, parameter[name[argv]]]]]
variable[seed] assign[=] constant[None]
if name[self].requires_seed begin[:]
variable[seed_filepath] assign[=] call[name[arguments].pop, parameter[constant[seed_file]]]
variable[seed] assign[=] <ast.IfExp object at 0x7da204963100>
call[name[arguments]][constant[api]] assign[=] call[name[Iota], parameter[]]
return[name[arguments]] | keyword[def] identifier[parse_argv] ( identifier[self] , identifier[argv] = keyword[None] ):
literal[string]
identifier[arguments] = identifier[vars] ( identifier[self] . identifier[create_argument_parser] (). identifier[parse_args] ( identifier[argv] ))
identifier[seed] = keyword[None]
keyword[if] identifier[self] . identifier[requires_seed] :
identifier[seed_filepath] = identifier[arguments] . identifier[pop] ( literal[string] )
identifier[seed] =(
identifier[self] . identifier[seed_from_filepath] ( identifier[seed_filepath] )
keyword[if] identifier[seed_filepath]
keyword[else] identifier[self] . identifier[prompt_for_seed] ()
)
identifier[arguments] [ literal[string] ]= identifier[Iota] (
identifier[adapter] = identifier[arguments] . identifier[pop] ( literal[string] ),
identifier[seed] = identifier[seed] ,
identifier[testnet] = identifier[arguments] . identifier[pop] ( literal[string] ),
)
keyword[return] identifier[arguments] | def parse_argv(self, argv=None):
# type: (Optional[tuple]) -> dict
'\n Parses arguments for the command.\n\n :param argv:\n Arguments to pass to the argument parser.\n If ``None``, defaults to ``sys.argv[1:]``.\n '
arguments = vars(self.create_argument_parser().parse_args(argv))
seed = None
if self.requires_seed:
seed_filepath = arguments.pop('seed_file')
seed = self.seed_from_filepath(seed_filepath) if seed_filepath else self.prompt_for_seed() # depends on [control=['if'], data=[]]
arguments['api'] = Iota(adapter=arguments.pop('uri'), seed=seed, testnet=arguments.pop('testnet'))
return arguments |
def police_priority_map_conform_map_pri3_conform(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
conform = ET.SubElement(police_priority_map, "conform")
map_pri3_conform = ET.SubElement(conform, "map-pri3-conform")
map_pri3_conform.text = kwargs.pop('map_pri3_conform')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[police_priority_map_conform_map_pri3_conform, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[police_priority_map] assign[=] call[name[ET].SubElement, parameter[name[config], constant[police-priority-map]]]
variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[police_priority_map], constant[name]]]
name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]]
variable[conform] assign[=] call[name[ET].SubElement, parameter[name[police_priority_map], constant[conform]]]
variable[map_pri3_conform] assign[=] call[name[ET].SubElement, parameter[name[conform], constant[map-pri3-conform]]]
name[map_pri3_conform].text assign[=] call[name[kwargs].pop, parameter[constant[map_pri3_conform]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[police_priority_map_conform_map_pri3_conform] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[police_priority_map] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[police_priority_map] , literal[string] )
identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[conform] = identifier[ET] . identifier[SubElement] ( identifier[police_priority_map] , literal[string] )
identifier[map_pri3_conform] = identifier[ET] . identifier[SubElement] ( identifier[conform] , literal[string] )
identifier[map_pri3_conform] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def police_priority_map_conform_map_pri3_conform(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
police_priority_map = ET.SubElement(config, 'police-priority-map', xmlns='urn:brocade.com:mgmt:brocade-policer')
name_key = ET.SubElement(police_priority_map, 'name')
name_key.text = kwargs.pop('name')
conform = ET.SubElement(police_priority_map, 'conform')
map_pri3_conform = ET.SubElement(conform, 'map-pri3-conform')
map_pri3_conform.text = kwargs.pop('map_pri3_conform')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def _stop(sas, cmd_args):
"""Stop the service if no jobs are running unless force is set"""
if not cmd_args.force:
status = sas.get_instance_status()
jobs = int(status['job_count'])
if jobs:
return status
return sas.stop_instance() | def function[_stop, parameter[sas, cmd_args]]:
constant[Stop the service if no jobs are running unless force is set]
if <ast.UnaryOp object at 0x7da1b1b84100> begin[:]
variable[status] assign[=] call[name[sas].get_instance_status, parameter[]]
variable[jobs] assign[=] call[name[int], parameter[call[name[status]][constant[job_count]]]]
if name[jobs] begin[:]
return[name[status]]
return[call[name[sas].stop_instance, parameter[]]] | keyword[def] identifier[_stop] ( identifier[sas] , identifier[cmd_args] ):
literal[string]
keyword[if] keyword[not] identifier[cmd_args] . identifier[force] :
identifier[status] = identifier[sas] . identifier[get_instance_status] ()
identifier[jobs] = identifier[int] ( identifier[status] [ literal[string] ])
keyword[if] identifier[jobs] :
keyword[return] identifier[status]
keyword[return] identifier[sas] . identifier[stop_instance] () | def _stop(sas, cmd_args):
"""Stop the service if no jobs are running unless force is set"""
if not cmd_args.force:
status = sas.get_instance_status()
jobs = int(status['job_count'])
if jobs:
return status # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return sas.stop_instance() |
def loads(s, single=False):
"""
Deserialize DMRX string representations
Args:
s (str): a DMRX string
single (bool): if `True`, only return the first Xmrs object
Returns:
a generator of Xmrs objects (unless *single* is `True`)
"""
corpus = etree.fromstring(s)
if single:
ds = _deserialize_dmrs(next(iter(corpus)))
else:
ds = (_deserialize_dmrs(dmrs_elem) for dmrs_elem in corpus)
return ds | def function[loads, parameter[s, single]]:
constant[
Deserialize DMRX string representations
Args:
s (str): a DMRX string
single (bool): if `True`, only return the first Xmrs object
Returns:
a generator of Xmrs objects (unless *single* is `True`)
]
variable[corpus] assign[=] call[name[etree].fromstring, parameter[name[s]]]
if name[single] begin[:]
variable[ds] assign[=] call[name[_deserialize_dmrs], parameter[call[name[next], parameter[call[name[iter], parameter[name[corpus]]]]]]]
return[name[ds]] | keyword[def] identifier[loads] ( identifier[s] , identifier[single] = keyword[False] ):
literal[string]
identifier[corpus] = identifier[etree] . identifier[fromstring] ( identifier[s] )
keyword[if] identifier[single] :
identifier[ds] = identifier[_deserialize_dmrs] ( identifier[next] ( identifier[iter] ( identifier[corpus] )))
keyword[else] :
identifier[ds] =( identifier[_deserialize_dmrs] ( identifier[dmrs_elem] ) keyword[for] identifier[dmrs_elem] keyword[in] identifier[corpus] )
keyword[return] identifier[ds] | def loads(s, single=False):
"""
Deserialize DMRX string representations
Args:
s (str): a DMRX string
single (bool): if `True`, only return the first Xmrs object
Returns:
a generator of Xmrs objects (unless *single* is `True`)
"""
corpus = etree.fromstring(s)
if single:
ds = _deserialize_dmrs(next(iter(corpus))) # depends on [control=['if'], data=[]]
else:
ds = (_deserialize_dmrs(dmrs_elem) for dmrs_elem in corpus)
return ds |
def _fletcher16_checksum(self, data):
"""
Calculates a fletcher16 checksum for the list of bytes
:param data: a list of bytes that comprise the message
:return:
"""
sum1 = 0
sum2 = 0
for i, b in enumerate(data):
sum1 += b
sum1 &= 0xff # Results wrapped at 16 bits
sum2 += sum1
sum2 &= 0xff
logger.debug('sum1: {} sum2: {}'.format(sum1, sum2))
return sum1, sum2 | def function[_fletcher16_checksum, parameter[self, data]]:
constant[
Calculates a fletcher16 checksum for the list of bytes
:param data: a list of bytes that comprise the message
:return:
]
variable[sum1] assign[=] constant[0]
variable[sum2] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1afefa650>, <ast.Name object at 0x7da1afefa620>]]] in starred[call[name[enumerate], parameter[name[data]]]] begin[:]
<ast.AugAssign object at 0x7da1afef9ea0>
<ast.AugAssign object at 0x7da1afef9f30>
<ast.AugAssign object at 0x7da1afefa710>
<ast.AugAssign object at 0x7da1afefa500>
call[name[logger].debug, parameter[call[constant[sum1: {} sum2: {}].format, parameter[name[sum1], name[sum2]]]]]
return[tuple[[<ast.Name object at 0x7da1b008f5b0>, <ast.Name object at 0x7da1b008c0a0>]]] | keyword[def] identifier[_fletcher16_checksum] ( identifier[self] , identifier[data] ):
literal[string]
identifier[sum1] = literal[int]
identifier[sum2] = literal[int]
keyword[for] identifier[i] , identifier[b] keyword[in] identifier[enumerate] ( identifier[data] ):
identifier[sum1] += identifier[b]
identifier[sum1] &= literal[int]
identifier[sum2] += identifier[sum1]
identifier[sum2] &= literal[int]
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[sum1] , identifier[sum2] ))
keyword[return] identifier[sum1] , identifier[sum2] | def _fletcher16_checksum(self, data):
"""
Calculates a fletcher16 checksum for the list of bytes
:param data: a list of bytes that comprise the message
:return:
"""
sum1 = 0
sum2 = 0
for (i, b) in enumerate(data):
sum1 += b
sum1 &= 255 # Results wrapped at 16 bits
sum2 += sum1
sum2 &= 255 # depends on [control=['for'], data=[]]
logger.debug('sum1: {} sum2: {}'.format(sum1, sum2))
return (sum1, sum2) |
def switchport_list(self):
"""list[dict]:A list of dictionary items describing the details
of list of dictionary items describing the details of switch port"""
urn = "{urn:brocade.com:mgmt:brocade-interface-ext}"
result = []
request_interface = self.get_interface_switchport_request()
interface_result = self._callback(request_interface, 'get')
for interface in interface_result.findall('%sswitchport' % urn):
vlans = []
interface_type = self.get_node_value(interface, '%sinterface-type',
urn)
interface_name = self.get_node_value(interface, '%sinterface-name',
urn)
mode = self.get_node_value(interface, '%smode', urn)
intf = interface.find('%sactive-vlans' % urn)
for vlan_node in intf.findall('%svlanid' % urn):
vlan = vlan_node.text
vlans.append(vlan)
results = {'vlan-id': vlans,
'mode': mode,
'interface-name': interface_name,
'interface_type': interface_type}
result.append(results)
return result | def function[switchport_list, parameter[self]]:
constant[list[dict]:A list of dictionary items describing the details
of list of dictionary items describing the details of switch port]
variable[urn] assign[=] constant[{urn:brocade.com:mgmt:brocade-interface-ext}]
variable[result] assign[=] list[[]]
variable[request_interface] assign[=] call[name[self].get_interface_switchport_request, parameter[]]
variable[interface_result] assign[=] call[name[self]._callback, parameter[name[request_interface], constant[get]]]
for taget[name[interface]] in starred[call[name[interface_result].findall, parameter[binary_operation[constant[%sswitchport] <ast.Mod object at 0x7da2590d6920> name[urn]]]]] begin[:]
variable[vlans] assign[=] list[[]]
variable[interface_type] assign[=] call[name[self].get_node_value, parameter[name[interface], constant[%sinterface-type], name[urn]]]
variable[interface_name] assign[=] call[name[self].get_node_value, parameter[name[interface], constant[%sinterface-name], name[urn]]]
variable[mode] assign[=] call[name[self].get_node_value, parameter[name[interface], constant[%smode], name[urn]]]
variable[intf] assign[=] call[name[interface].find, parameter[binary_operation[constant[%sactive-vlans] <ast.Mod object at 0x7da2590d6920> name[urn]]]]
for taget[name[vlan_node]] in starred[call[name[intf].findall, parameter[binary_operation[constant[%svlanid] <ast.Mod object at 0x7da2590d6920> name[urn]]]]] begin[:]
variable[vlan] assign[=] name[vlan_node].text
call[name[vlans].append, parameter[name[vlan]]]
variable[results] assign[=] dictionary[[<ast.Constant object at 0x7da207f02770>, <ast.Constant object at 0x7da207f013c0>, <ast.Constant object at 0x7da207f02620>, <ast.Constant object at 0x7da207f00b80>], [<ast.Name object at 0x7da207f02830>, <ast.Name object at 0x7da207f00100>, <ast.Name object at 0x7da207f01030>, <ast.Name object at 0x7da207f016c0>]]
call[name[result].append, parameter[name[results]]]
return[name[result]] | keyword[def] identifier[switchport_list] ( identifier[self] ):
literal[string]
identifier[urn] = literal[string]
identifier[result] =[]
identifier[request_interface] = identifier[self] . identifier[get_interface_switchport_request] ()
identifier[interface_result] = identifier[self] . identifier[_callback] ( identifier[request_interface] , literal[string] )
keyword[for] identifier[interface] keyword[in] identifier[interface_result] . identifier[findall] ( literal[string] % identifier[urn] ):
identifier[vlans] =[]
identifier[interface_type] = identifier[self] . identifier[get_node_value] ( identifier[interface] , literal[string] ,
identifier[urn] )
identifier[interface_name] = identifier[self] . identifier[get_node_value] ( identifier[interface] , literal[string] ,
identifier[urn] )
identifier[mode] = identifier[self] . identifier[get_node_value] ( identifier[interface] , literal[string] , identifier[urn] )
identifier[intf] = identifier[interface] . identifier[find] ( literal[string] % identifier[urn] )
keyword[for] identifier[vlan_node] keyword[in] identifier[intf] . identifier[findall] ( literal[string] % identifier[urn] ):
identifier[vlan] = identifier[vlan_node] . identifier[text]
identifier[vlans] . identifier[append] ( identifier[vlan] )
identifier[results] ={ literal[string] : identifier[vlans] ,
literal[string] : identifier[mode] ,
literal[string] : identifier[interface_name] ,
literal[string] : identifier[interface_type] }
identifier[result] . identifier[append] ( identifier[results] )
keyword[return] identifier[result] | def switchport_list(self):
"""list[dict]:A list of dictionary items describing the details
of list of dictionary items describing the details of switch port"""
urn = '{urn:brocade.com:mgmt:brocade-interface-ext}'
result = []
request_interface = self.get_interface_switchport_request()
interface_result = self._callback(request_interface, 'get')
for interface in interface_result.findall('%sswitchport' % urn):
vlans = []
interface_type = self.get_node_value(interface, '%sinterface-type', urn)
interface_name = self.get_node_value(interface, '%sinterface-name', urn)
mode = self.get_node_value(interface, '%smode', urn)
intf = interface.find('%sactive-vlans' % urn)
for vlan_node in intf.findall('%svlanid' % urn):
vlan = vlan_node.text
vlans.append(vlan) # depends on [control=['for'], data=['vlan_node']]
results = {'vlan-id': vlans, 'mode': mode, 'interface-name': interface_name, 'interface_type': interface_type}
result.append(results) # depends on [control=['for'], data=['interface']]
return result |
def get_view(self, request, view_class, opts=None):
"""
Instantiates and returns the view class that will generate the
actual context for this plugin.
"""
kwargs = {}
if opts:
if not isinstance(opts, dict):
opts = opts.__dict__
else:
opts = {}
if not view_class in VALID_MIXIN_OPTIONS:
valid_options = view_class.__dict__.keys()
for cls in view_class.__bases__:
if cls != object:
valid_options += cls.__dict__.keys()
VALID_MIXIN_OPTIONS[view_class] = valid_options
for key in VALID_MIXIN_OPTIONS[view_class]:
if key in opts:
kwargs[key] = opts[key]
elif hasattr(self, key):
kwargs[key] = getattr(self, key)
view = view_class(**kwargs)
view.request = request
view.kwargs = {}
return view | def function[get_view, parameter[self, request, view_class, opts]]:
constant[
Instantiates and returns the view class that will generate the
actual context for this plugin.
]
variable[kwargs] assign[=] dictionary[[], []]
if name[opts] begin[:]
if <ast.UnaryOp object at 0x7da1b183b100> begin[:]
variable[opts] assign[=] name[opts].__dict__
if <ast.UnaryOp object at 0x7da1b18392a0> begin[:]
variable[valid_options] assign[=] call[name[view_class].__dict__.keys, parameter[]]
for taget[name[cls]] in starred[name[view_class].__bases__] begin[:]
if compare[name[cls] not_equal[!=] name[object]] begin[:]
<ast.AugAssign object at 0x7da1b183a7a0>
call[name[VALID_MIXIN_OPTIONS]][name[view_class]] assign[=] name[valid_options]
for taget[name[key]] in starred[call[name[VALID_MIXIN_OPTIONS]][name[view_class]]] begin[:]
if compare[name[key] in name[opts]] begin[:]
call[name[kwargs]][name[key]] assign[=] call[name[opts]][name[key]]
variable[view] assign[=] call[name[view_class], parameter[]]
name[view].request assign[=] name[request]
name[view].kwargs assign[=] dictionary[[], []]
return[name[view]] | keyword[def] identifier[get_view] ( identifier[self] , identifier[request] , identifier[view_class] , identifier[opts] = keyword[None] ):
literal[string]
identifier[kwargs] ={}
keyword[if] identifier[opts] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[opts] , identifier[dict] ):
identifier[opts] = identifier[opts] . identifier[__dict__]
keyword[else] :
identifier[opts] ={}
keyword[if] keyword[not] identifier[view_class] keyword[in] identifier[VALID_MIXIN_OPTIONS] :
identifier[valid_options] = identifier[view_class] . identifier[__dict__] . identifier[keys] ()
keyword[for] identifier[cls] keyword[in] identifier[view_class] . identifier[__bases__] :
keyword[if] identifier[cls] != identifier[object] :
identifier[valid_options] += identifier[cls] . identifier[__dict__] . identifier[keys] ()
identifier[VALID_MIXIN_OPTIONS] [ identifier[view_class] ]= identifier[valid_options]
keyword[for] identifier[key] keyword[in] identifier[VALID_MIXIN_OPTIONS] [ identifier[view_class] ]:
keyword[if] identifier[key] keyword[in] identifier[opts] :
identifier[kwargs] [ identifier[key] ]= identifier[opts] [ identifier[key] ]
keyword[elif] identifier[hasattr] ( identifier[self] , identifier[key] ):
identifier[kwargs] [ identifier[key] ]= identifier[getattr] ( identifier[self] , identifier[key] )
identifier[view] = identifier[view_class] (** identifier[kwargs] )
identifier[view] . identifier[request] = identifier[request]
identifier[view] . identifier[kwargs] ={}
keyword[return] identifier[view] | def get_view(self, request, view_class, opts=None):
"""
Instantiates and returns the view class that will generate the
actual context for this plugin.
"""
kwargs = {}
if opts:
if not isinstance(opts, dict):
opts = opts.__dict__ # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
opts = {}
if not view_class in VALID_MIXIN_OPTIONS:
valid_options = view_class.__dict__.keys()
for cls in view_class.__bases__:
if cls != object:
valid_options += cls.__dict__.keys() # depends on [control=['if'], data=['cls']] # depends on [control=['for'], data=['cls']]
VALID_MIXIN_OPTIONS[view_class] = valid_options # depends on [control=['if'], data=[]]
for key in VALID_MIXIN_OPTIONS[view_class]:
if key in opts:
kwargs[key] = opts[key] # depends on [control=['if'], data=['key', 'opts']]
elif hasattr(self, key):
kwargs[key] = getattr(self, key) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
view = view_class(**kwargs)
view.request = request
view.kwargs = {}
return view |
def flexbar_barplot (self):
""" Make the HighCharts HTML to plot the flexbar rates """
# Specify the order of the different possible categories
keys = OrderedDict()
keys['remaining_reads'] = { 'color': '#437bb1', 'name': 'Remaining reads' }
keys['skipped_due_to_uncalled_bases'] = { 'color': '#e63491', 'name': 'Skipped due to uncalled bases' }
keys['short_prior_to_adapter_removal'] = { 'color': '#b1084c', 'name': 'Short prior to adapter removal' }
keys['finally_skipped_short_reads'] = { 'color': '#7f0000', 'name': 'Finally skipped short reads' }
# Config for the plot
pconfig = {
'id': 'flexbar_plot',
'title': 'Flexbar: Processed Reads',
'ylab': '# Reads',
'cpswitch_counts_label': 'Number of Reads',
'hide_zero_cats': False
}
self.add_section( plot = bargraph.plot(self.flexbar_data, keys, pconfig) ) | def function[flexbar_barplot, parameter[self]]:
constant[ Make the HighCharts HTML to plot the flexbar rates ]
variable[keys] assign[=] call[name[OrderedDict], parameter[]]
call[name[keys]][constant[remaining_reads]] assign[=] dictionary[[<ast.Constant object at 0x7da18eb55750>, <ast.Constant object at 0x7da18eb57ee0>], [<ast.Constant object at 0x7da18eb55780>, <ast.Constant object at 0x7da18eb56cb0>]]
call[name[keys]][constant[skipped_due_to_uncalled_bases]] assign[=] dictionary[[<ast.Constant object at 0x7da18eb569e0>, <ast.Constant object at 0x7da18eb546d0>], [<ast.Constant object at 0x7da18eb57490>, <ast.Constant object at 0x7da18eb56530>]]
call[name[keys]][constant[short_prior_to_adapter_removal]] assign[=] dictionary[[<ast.Constant object at 0x7da18eb54d30>, <ast.Constant object at 0x7da18eb558a0>], [<ast.Constant object at 0x7da18eb55840>, <ast.Constant object at 0x7da18eb56740>]]
call[name[keys]][constant[finally_skipped_short_reads]] assign[=] dictionary[[<ast.Constant object at 0x7da18eb57040>, <ast.Constant object at 0x7da18eb54160>], [<ast.Constant object at 0x7da18eb57220>, <ast.Constant object at 0x7da18eb557e0>]]
variable[pconfig] assign[=] dictionary[[<ast.Constant object at 0x7da18eb57190>, <ast.Constant object at 0x7da18eb57d00>, <ast.Constant object at 0x7da18eb543a0>, <ast.Constant object at 0x7da18eb54c40>, <ast.Constant object at 0x7da18eb55180>], [<ast.Constant object at 0x7da18eb57a30>, <ast.Constant object at 0x7da18eb54040>, <ast.Constant object at 0x7da18eb55d50>, <ast.Constant object at 0x7da18eb54bb0>, <ast.Constant object at 0x7da18eb57dc0>]]
call[name[self].add_section, parameter[]] | keyword[def] identifier[flexbar_barplot] ( identifier[self] ):
literal[string]
identifier[keys] = identifier[OrderedDict] ()
identifier[keys] [ literal[string] ]={ literal[string] : literal[string] , literal[string] : literal[string] }
identifier[keys] [ literal[string] ]={ literal[string] : literal[string] , literal[string] : literal[string] }
identifier[keys] [ literal[string] ]={ literal[string] : literal[string] , literal[string] : literal[string] }
identifier[keys] [ literal[string] ]={ literal[string] : literal[string] , literal[string] : literal[string] }
identifier[pconfig] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : keyword[False]
}
identifier[self] . identifier[add_section] ( identifier[plot] = identifier[bargraph] . identifier[plot] ( identifier[self] . identifier[flexbar_data] , identifier[keys] , identifier[pconfig] )) | def flexbar_barplot(self):
""" Make the HighCharts HTML to plot the flexbar rates """
# Specify the order of the different possible categories
keys = OrderedDict()
keys['remaining_reads'] = {'color': '#437bb1', 'name': 'Remaining reads'}
keys['skipped_due_to_uncalled_bases'] = {'color': '#e63491', 'name': 'Skipped due to uncalled bases'}
keys['short_prior_to_adapter_removal'] = {'color': '#b1084c', 'name': 'Short prior to adapter removal'}
keys['finally_skipped_short_reads'] = {'color': '#7f0000', 'name': 'Finally skipped short reads'}
# Config for the plot
pconfig = {'id': 'flexbar_plot', 'title': 'Flexbar: Processed Reads', 'ylab': '# Reads', 'cpswitch_counts_label': 'Number of Reads', 'hide_zero_cats': False}
self.add_section(plot=bargraph.plot(self.flexbar_data, keys, pconfig)) |
def column_aggregate_expectation(cls, func):
"""Constructs an expectation using column-aggregate semantics.
"""
if PY3:
argspec = inspect.getfullargspec(func)[0][1:]
else:
argspec = inspect.getargspec(func)[0][1:]
@cls.expectation(argspec)
@wraps(func)
def inner_wrapper(self, column, result_format=None, *args, **kwargs):
if result_format is None:
result_format = self.default_expectation_args["result_format"]
result_format = parse_result_format(result_format)
evaluation_result = func(self, column, *args, **kwargs)
if 'success' not in evaluation_result:
raise ValueError(
"Column aggregate expectation failed to return required information: success")
if 'result' not in evaluation_result:
raise ValueError(
"Column aggregate expectation failed to return required information: result")
if 'observed_value' not in evaluation_result['result']:
raise ValueError(
"Column aggregate expectation failed to return required information: result.observed_value")
return_obj = {
'success': bool(evaluation_result['success'])
}
if result_format['result_format'] == 'BOOLEAN_ONLY':
return return_obj
# Use the element and null count information from a column_aggregate_expectation if it is needed
# it anyway to avoid an extra trip to the database
if 'element_count' not in evaluation_result and 'null_count' not in evaluation_result:
count_query = sa.select([
sa.func.count().label('element_count'),
sa.func.sum(
sa.case([(sa.column(column) == None, 1)], else_=0)
).label('null_count'),
]).select_from(self._table)
count_results = dict(
self.engine.execute(count_query).fetchone())
# Handle case of empty table gracefully:
if "element_count" not in count_results or count_results["element_count"] is None:
count_results["element_count"] = 0
if "null_count" not in count_results or count_results["null_count"] is None:
count_results["null_count"] = 0
return_obj['result'] = {
'observed_value': evaluation_result['result']['observed_value'],
"element_count": count_results['element_count'],
"missing_count": count_results['null_count'],
"missing_percent": count_results['null_count'] / count_results['element_count'] if count_results['element_count'] > 0 else None
}
else:
return_obj['result'] = {
'observed_value': evaluation_result['result']['observed_value'],
"element_count": evaluation_result["element_count"],
"missing_count": evaluation_result["null_count"],
"missing_percent": evaluation_result['null_count'] / evaluation_result['element_count'] if evaluation_result['element_count'] > 0 else None
}
if result_format['result_format'] == 'BASIC':
return return_obj
if 'details' in evaluation_result['result']:
return_obj['result']['details'] = evaluation_result['result']['details']
if result_format['result_format'] in ["SUMMARY", "COMPLETE"]:
return return_obj
raise ValueError("Unknown result_format %s." %
(result_format['result_format'],))
return inner_wrapper | def function[column_aggregate_expectation, parameter[cls, func]]:
constant[Constructs an expectation using column-aggregate semantics.
]
if name[PY3] begin[:]
variable[argspec] assign[=] call[call[call[name[inspect].getfullargspec, parameter[name[func]]]][constant[0]]][<ast.Slice object at 0x7da1b1711de0>]
def function[inner_wrapper, parameter[self, column, result_format]]:
if compare[name[result_format] is constant[None]] begin[:]
variable[result_format] assign[=] call[name[self].default_expectation_args][constant[result_format]]
variable[result_format] assign[=] call[name[parse_result_format], parameter[name[result_format]]]
variable[evaluation_result] assign[=] call[name[func], parameter[name[self], name[column], <ast.Starred object at 0x7da1b1710490>]]
if compare[constant[success] <ast.NotIn object at 0x7da2590d7190> name[evaluation_result]] begin[:]
<ast.Raise object at 0x7da1b1713c70>
if compare[constant[result] <ast.NotIn object at 0x7da2590d7190> name[evaluation_result]] begin[:]
<ast.Raise object at 0x7da1b1712d70>
if compare[constant[observed_value] <ast.NotIn object at 0x7da2590d7190> call[name[evaluation_result]][constant[result]]] begin[:]
<ast.Raise object at 0x7da1b1711c90>
variable[return_obj] assign[=] dictionary[[<ast.Constant object at 0x7da1b17139d0>], [<ast.Call object at 0x7da1b17111b0>]]
if compare[call[name[result_format]][constant[result_format]] equal[==] constant[BOOLEAN_ONLY]] begin[:]
return[name[return_obj]]
if <ast.BoolOp object at 0x7da1b1710ac0> begin[:]
variable[count_query] assign[=] call[call[name[sa].select, parameter[list[[<ast.Call object at 0x7da1b1710340>, <ast.Call object at 0x7da1b1713850>]]]].select_from, parameter[name[self]._table]]
variable[count_results] assign[=] call[name[dict], parameter[call[call[name[self].engine.execute, parameter[name[count_query]]].fetchone, parameter[]]]]
if <ast.BoolOp object at 0x7da1b1713100> begin[:]
call[name[count_results]][constant[element_count]] assign[=] constant[0]
if <ast.BoolOp object at 0x7da1b18a0820> begin[:]
call[name[count_results]][constant[null_count]] assign[=] constant[0]
call[name[return_obj]][constant[result]] assign[=] dictionary[[<ast.Constant object at 0x7da1b17a0c40>, <ast.Constant object at 0x7da1b17a0ca0>, <ast.Constant object at 0x7da1b17a0c10>, <ast.Constant object at 0x7da1b17a0bb0>], [<ast.Subscript object at 0x7da1b17a0910>, <ast.Subscript object at 0x7da1b17a16c0>, <ast.Subscript object at 0x7da1b17a1600>, <ast.IfExp object at 0x7da1b17a0370>]]
if compare[call[name[result_format]][constant[result_format]] equal[==] constant[BASIC]] begin[:]
return[name[return_obj]]
if compare[constant[details] in call[name[evaluation_result]][constant[result]]] begin[:]
call[call[name[return_obj]][constant[result]]][constant[details]] assign[=] call[call[name[evaluation_result]][constant[result]]][constant[details]]
if compare[call[name[result_format]][constant[result_format]] in list[[<ast.Constant object at 0x7da1b1769c60>, <ast.Constant object at 0x7da1b176af20>]]] begin[:]
return[name[return_obj]]
<ast.Raise object at 0x7da1b176aa40>
return[name[inner_wrapper]] | keyword[def] identifier[column_aggregate_expectation] ( identifier[cls] , identifier[func] ):
literal[string]
keyword[if] identifier[PY3] :
identifier[argspec] = identifier[inspect] . identifier[getfullargspec] ( identifier[func] )[ literal[int] ][ literal[int] :]
keyword[else] :
identifier[argspec] = identifier[inspect] . identifier[getargspec] ( identifier[func] )[ literal[int] ][ literal[int] :]
@ identifier[cls] . identifier[expectation] ( identifier[argspec] )
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[inner_wrapper] ( identifier[self] , identifier[column] , identifier[result_format] = keyword[None] ,* identifier[args] ,** identifier[kwargs] ):
keyword[if] identifier[result_format] keyword[is] keyword[None] :
identifier[result_format] = identifier[self] . identifier[default_expectation_args] [ literal[string] ]
identifier[result_format] = identifier[parse_result_format] ( identifier[result_format] )
identifier[evaluation_result] = identifier[func] ( identifier[self] , identifier[column] ,* identifier[args] ,** identifier[kwargs] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[evaluation_result] :
keyword[raise] identifier[ValueError] (
literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[evaluation_result] :
keyword[raise] identifier[ValueError] (
literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[evaluation_result] [ literal[string] ]:
keyword[raise] identifier[ValueError] (
literal[string] )
identifier[return_obj] ={
literal[string] : identifier[bool] ( identifier[evaluation_result] [ literal[string] ])
}
keyword[if] identifier[result_format] [ literal[string] ]== literal[string] :
keyword[return] identifier[return_obj]
keyword[if] literal[string] keyword[not] keyword[in] identifier[evaluation_result] keyword[and] literal[string] keyword[not] keyword[in] identifier[evaluation_result] :
identifier[count_query] = identifier[sa] . identifier[select] ([
identifier[sa] . identifier[func] . identifier[count] (). identifier[label] ( literal[string] ),
identifier[sa] . identifier[func] . identifier[sum] (
identifier[sa] . identifier[case] ([( identifier[sa] . identifier[column] ( identifier[column] )== keyword[None] , literal[int] )], identifier[else_] = literal[int] )
). identifier[label] ( literal[string] ),
]). identifier[select_from] ( identifier[self] . identifier[_table] )
identifier[count_results] = identifier[dict] (
identifier[self] . identifier[engine] . identifier[execute] ( identifier[count_query] ). identifier[fetchone] ())
keyword[if] literal[string] keyword[not] keyword[in] identifier[count_results] keyword[or] identifier[count_results] [ literal[string] ] keyword[is] keyword[None] :
identifier[count_results] [ literal[string] ]= literal[int]
keyword[if] literal[string] keyword[not] keyword[in] identifier[count_results] keyword[or] identifier[count_results] [ literal[string] ] keyword[is] keyword[None] :
identifier[count_results] [ literal[string] ]= literal[int]
identifier[return_obj] [ literal[string] ]={
literal[string] : identifier[evaluation_result] [ literal[string] ][ literal[string] ],
literal[string] : identifier[count_results] [ literal[string] ],
literal[string] : identifier[count_results] [ literal[string] ],
literal[string] : identifier[count_results] [ literal[string] ]/ identifier[count_results] [ literal[string] ] keyword[if] identifier[count_results] [ literal[string] ]> literal[int] keyword[else] keyword[None]
}
keyword[else] :
identifier[return_obj] [ literal[string] ]={
literal[string] : identifier[evaluation_result] [ literal[string] ][ literal[string] ],
literal[string] : identifier[evaluation_result] [ literal[string] ],
literal[string] : identifier[evaluation_result] [ literal[string] ],
literal[string] : identifier[evaluation_result] [ literal[string] ]/ identifier[evaluation_result] [ literal[string] ] keyword[if] identifier[evaluation_result] [ literal[string] ]> literal[int] keyword[else] keyword[None]
}
keyword[if] identifier[result_format] [ literal[string] ]== literal[string] :
keyword[return] identifier[return_obj]
keyword[if] literal[string] keyword[in] identifier[evaluation_result] [ literal[string] ]:
identifier[return_obj] [ literal[string] ][ literal[string] ]= identifier[evaluation_result] [ literal[string] ][ literal[string] ]
keyword[if] identifier[result_format] [ literal[string] ] keyword[in] [ literal[string] , literal[string] ]:
keyword[return] identifier[return_obj]
keyword[raise] identifier[ValueError] ( literal[string] %
( identifier[result_format] [ literal[string] ],))
keyword[return] identifier[inner_wrapper] | def column_aggregate_expectation(cls, func):
"""Constructs an expectation using column-aggregate semantics.
"""
if PY3:
argspec = inspect.getfullargspec(func)[0][1:] # depends on [control=['if'], data=[]]
else:
argspec = inspect.getargspec(func)[0][1:]
@cls.expectation(argspec)
@wraps(func)
def inner_wrapper(self, column, result_format=None, *args, **kwargs):
if result_format is None:
result_format = self.default_expectation_args['result_format'] # depends on [control=['if'], data=['result_format']]
result_format = parse_result_format(result_format)
evaluation_result = func(self, column, *args, **kwargs)
if 'success' not in evaluation_result:
raise ValueError('Column aggregate expectation failed to return required information: success') # depends on [control=['if'], data=[]]
if 'result' not in evaluation_result:
raise ValueError('Column aggregate expectation failed to return required information: result') # depends on [control=['if'], data=[]]
if 'observed_value' not in evaluation_result['result']:
raise ValueError('Column aggregate expectation failed to return required information: result.observed_value') # depends on [control=['if'], data=[]]
return_obj = {'success': bool(evaluation_result['success'])}
if result_format['result_format'] == 'BOOLEAN_ONLY':
return return_obj # depends on [control=['if'], data=[]]
# Use the element and null count information from a column_aggregate_expectation if it is needed
# it anyway to avoid an extra trip to the database
if 'element_count' not in evaluation_result and 'null_count' not in evaluation_result:
count_query = sa.select([sa.func.count().label('element_count'), sa.func.sum(sa.case([(sa.column(column) == None, 1)], else_=0)).label('null_count')]).select_from(self._table)
count_results = dict(self.engine.execute(count_query).fetchone())
# Handle case of empty table gracefully:
if 'element_count' not in count_results or count_results['element_count'] is None:
count_results['element_count'] = 0 # depends on [control=['if'], data=[]]
if 'null_count' not in count_results or count_results['null_count'] is None:
count_results['null_count'] = 0 # depends on [control=['if'], data=[]]
return_obj['result'] = {'observed_value': evaluation_result['result']['observed_value'], 'element_count': count_results['element_count'], 'missing_count': count_results['null_count'], 'missing_percent': count_results['null_count'] / count_results['element_count'] if count_results['element_count'] > 0 else None} # depends on [control=['if'], data=[]]
else:
return_obj['result'] = {'observed_value': evaluation_result['result']['observed_value'], 'element_count': evaluation_result['element_count'], 'missing_count': evaluation_result['null_count'], 'missing_percent': evaluation_result['null_count'] / evaluation_result['element_count'] if evaluation_result['element_count'] > 0 else None}
if result_format['result_format'] == 'BASIC':
return return_obj # depends on [control=['if'], data=[]]
if 'details' in evaluation_result['result']:
return_obj['result']['details'] = evaluation_result['result']['details'] # depends on [control=['if'], data=[]]
if result_format['result_format'] in ['SUMMARY', 'COMPLETE']:
return return_obj # depends on [control=['if'], data=[]]
raise ValueError('Unknown result_format %s.' % (result_format['result_format'],))
return inner_wrapper |
def get_value(self, merge=True, createfunc=None,
expiration_time=None, ignore_expiration=False):
"""
Return the value from the cache for this query.
"""
cache, cache_key = self._get_cache_plus_key()
# ignore_expiration means, if the value is in the cache
# but is expired, return it anyway. This doesn't make sense
# with createfunc, which says, if the value is expired, generate
# a new value.
assert not ignore_expiration or not createfunc, \
"Can't ignore expiration and also provide createfunc"
if ignore_expiration or not createfunc:
cached_value = cache.get(cache_key,
expiration_time=expiration_time,
ignore_expiration=ignore_expiration)
else:
cached_value = cache.get(cache_key)
if not cached_value:
cached_value = createfunc()
cache.set(cache_key, cached_value, timeout=expiration_time)
if cached_value and merge:
cached_value = self.merge_result(cached_value, load=False)
return cached_value | def function[get_value, parameter[self, merge, createfunc, expiration_time, ignore_expiration]]:
constant[
Return the value from the cache for this query.
]
<ast.Tuple object at 0x7da20c76eb30> assign[=] call[name[self]._get_cache_plus_key, parameter[]]
assert[<ast.BoolOp object at 0x7da20c76d180>]
if <ast.BoolOp object at 0x7da20c76d720> begin[:]
variable[cached_value] assign[=] call[name[cache].get, parameter[name[cache_key]]]
if <ast.BoolOp object at 0x7da20c76ec80> begin[:]
variable[cached_value] assign[=] call[name[self].merge_result, parameter[name[cached_value]]]
return[name[cached_value]] | keyword[def] identifier[get_value] ( identifier[self] , identifier[merge] = keyword[True] , identifier[createfunc] = keyword[None] ,
identifier[expiration_time] = keyword[None] , identifier[ignore_expiration] = keyword[False] ):
literal[string]
identifier[cache] , identifier[cache_key] = identifier[self] . identifier[_get_cache_plus_key] ()
keyword[assert] keyword[not] identifier[ignore_expiration] keyword[or] keyword[not] identifier[createfunc] , literal[string]
keyword[if] identifier[ignore_expiration] keyword[or] keyword[not] identifier[createfunc] :
identifier[cached_value] = identifier[cache] . identifier[get] ( identifier[cache_key] ,
identifier[expiration_time] = identifier[expiration_time] ,
identifier[ignore_expiration] = identifier[ignore_expiration] )
keyword[else] :
identifier[cached_value] = identifier[cache] . identifier[get] ( identifier[cache_key] )
keyword[if] keyword[not] identifier[cached_value] :
identifier[cached_value] = identifier[createfunc] ()
identifier[cache] . identifier[set] ( identifier[cache_key] , identifier[cached_value] , identifier[timeout] = identifier[expiration_time] )
keyword[if] identifier[cached_value] keyword[and] identifier[merge] :
identifier[cached_value] = identifier[self] . identifier[merge_result] ( identifier[cached_value] , identifier[load] = keyword[False] )
keyword[return] identifier[cached_value] | def get_value(self, merge=True, createfunc=None, expiration_time=None, ignore_expiration=False):
"""
Return the value from the cache for this query.
"""
(cache, cache_key) = self._get_cache_plus_key()
# ignore_expiration means, if the value is in the cache
# but is expired, return it anyway. This doesn't make sense
# with createfunc, which says, if the value is expired, generate
# a new value.
assert not ignore_expiration or not createfunc, "Can't ignore expiration and also provide createfunc"
if ignore_expiration or not createfunc:
cached_value = cache.get(cache_key, expiration_time=expiration_time, ignore_expiration=ignore_expiration) # depends on [control=['if'], data=[]]
else:
cached_value = cache.get(cache_key)
if not cached_value:
cached_value = createfunc()
cache.set(cache_key, cached_value, timeout=expiration_time) # depends on [control=['if'], data=[]]
if cached_value and merge:
cached_value = self.merge_result(cached_value, load=False) # depends on [control=['if'], data=[]]
return cached_value |
def _clear_queue(to_clear):
"""Clear all items from a queue safely."""
while not to_clear.empty():
try:
to_clear.get(False)
to_clear.task_done()
except queue.Empty:
continue | def function[_clear_queue, parameter[to_clear]]:
constant[Clear all items from a queue safely.]
while <ast.UnaryOp object at 0x7da1b0d30730> begin[:]
<ast.Try object at 0x7da1b0d33a60> | keyword[def] identifier[_clear_queue] ( identifier[to_clear] ):
literal[string]
keyword[while] keyword[not] identifier[to_clear] . identifier[empty] ():
keyword[try] :
identifier[to_clear] . identifier[get] ( keyword[False] )
identifier[to_clear] . identifier[task_done] ()
keyword[except] identifier[queue] . identifier[Empty] :
keyword[continue] | def _clear_queue(to_clear):
"""Clear all items from a queue safely."""
while not to_clear.empty():
try:
to_clear.get(False)
to_clear.task_done() # depends on [control=['try'], data=[]]
except queue.Empty:
continue # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] |
def rotunicode(io_object, decode=False):
"""Rotate ASCII <-> non-ASCII characters in a file.
:param io_object:
The file object to convert.
:type io_object:
:class:`io.TextIOWrapper`
:param decode:
If True, perform a rotunicode-decode (rotate from non-ASCII to ASCII).
Defaults to False (rotate from ASCII to non-ASCII).
:type decode:
`bool`
:return:
Yield the converted lines of the file.
:rtype:
`generator` of `unicode`
"""
rotu_fn = get_rotunicode_function_for_decode_argument(decode=decode)
return map(rotu_fn, map(safe_unicode, stream_file_lines(io_object))) | def function[rotunicode, parameter[io_object, decode]]:
constant[Rotate ASCII <-> non-ASCII characters in a file.
:param io_object:
The file object to convert.
:type io_object:
:class:`io.TextIOWrapper`
:param decode:
If True, perform a rotunicode-decode (rotate from non-ASCII to ASCII).
Defaults to False (rotate from ASCII to non-ASCII).
:type decode:
`bool`
:return:
Yield the converted lines of the file.
:rtype:
`generator` of `unicode`
]
variable[rotu_fn] assign[=] call[name[get_rotunicode_function_for_decode_argument], parameter[]]
return[call[name[map], parameter[name[rotu_fn], call[name[map], parameter[name[safe_unicode], call[name[stream_file_lines], parameter[name[io_object]]]]]]]] | keyword[def] identifier[rotunicode] ( identifier[io_object] , identifier[decode] = keyword[False] ):
literal[string]
identifier[rotu_fn] = identifier[get_rotunicode_function_for_decode_argument] ( identifier[decode] = identifier[decode] )
keyword[return] identifier[map] ( identifier[rotu_fn] , identifier[map] ( identifier[safe_unicode] , identifier[stream_file_lines] ( identifier[io_object] ))) | def rotunicode(io_object, decode=False):
"""Rotate ASCII <-> non-ASCII characters in a file.
:param io_object:
The file object to convert.
:type io_object:
:class:`io.TextIOWrapper`
:param decode:
If True, perform a rotunicode-decode (rotate from non-ASCII to ASCII).
Defaults to False (rotate from ASCII to non-ASCII).
:type decode:
`bool`
:return:
Yield the converted lines of the file.
:rtype:
`generator` of `unicode`
"""
rotu_fn = get_rotunicode_function_for_decode_argument(decode=decode)
return map(rotu_fn, map(safe_unicode, stream_file_lines(io_object))) |
def _to_dict(self, include=None, exclude=None):
"""Return a dict containing the entity's property values.
Args:
include: Optional set of property names to include, default all.
exclude: Optional set of property names to skip, default none.
A name contained in both include and exclude is excluded.
"""
if (include is not None and
not isinstance(include, (list, tuple, set, frozenset))):
raise TypeError('include should be a list, tuple or set')
if (exclude is not None and
not isinstance(exclude, (list, tuple, set, frozenset))):
raise TypeError('exclude should be a list, tuple or set')
values = {}
for prop in self._properties.itervalues():
name = prop._code_name
if include is not None and name not in include:
continue
if exclude is not None and name in exclude:
continue
try:
values[name] = prop._get_for_dict(self)
except UnprojectedPropertyError:
pass # Ignore unprojected properties rather than failing.
return values | def function[_to_dict, parameter[self, include, exclude]]:
constant[Return a dict containing the entity's property values.
Args:
include: Optional set of property names to include, default all.
exclude: Optional set of property names to skip, default none.
A name contained in both include and exclude is excluded.
]
if <ast.BoolOp object at 0x7da1b10d7310> begin[:]
<ast.Raise object at 0x7da1b10d4a00>
if <ast.BoolOp object at 0x7da1b23446a0> begin[:]
<ast.Raise object at 0x7da1b2344a30>
variable[values] assign[=] dictionary[[], []]
for taget[name[prop]] in starred[call[name[self]._properties.itervalues, parameter[]]] begin[:]
variable[name] assign[=] name[prop]._code_name
if <ast.BoolOp object at 0x7da1b23457e0> begin[:]
continue
if <ast.BoolOp object at 0x7da1b23455a0> begin[:]
continue
<ast.Try object at 0x7da1b2346380>
return[name[values]] | keyword[def] identifier[_to_dict] ( identifier[self] , identifier[include] = keyword[None] , identifier[exclude] = keyword[None] ):
literal[string]
keyword[if] ( identifier[include] keyword[is] keyword[not] keyword[None] keyword[and]
keyword[not] identifier[isinstance] ( identifier[include] ,( identifier[list] , identifier[tuple] , identifier[set] , identifier[frozenset] ))):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] ( identifier[exclude] keyword[is] keyword[not] keyword[None] keyword[and]
keyword[not] identifier[isinstance] ( identifier[exclude] ,( identifier[list] , identifier[tuple] , identifier[set] , identifier[frozenset] ))):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[values] ={}
keyword[for] identifier[prop] keyword[in] identifier[self] . identifier[_properties] . identifier[itervalues] ():
identifier[name] = identifier[prop] . identifier[_code_name]
keyword[if] identifier[include] keyword[is] keyword[not] keyword[None] keyword[and] identifier[name] keyword[not] keyword[in] identifier[include] :
keyword[continue]
keyword[if] identifier[exclude] keyword[is] keyword[not] keyword[None] keyword[and] identifier[name] keyword[in] identifier[exclude] :
keyword[continue]
keyword[try] :
identifier[values] [ identifier[name] ]= identifier[prop] . identifier[_get_for_dict] ( identifier[self] )
keyword[except] identifier[UnprojectedPropertyError] :
keyword[pass]
keyword[return] identifier[values] | def _to_dict(self, include=None, exclude=None):
"""Return a dict containing the entity's property values.
Args:
include: Optional set of property names to include, default all.
exclude: Optional set of property names to skip, default none.
A name contained in both include and exclude is excluded.
"""
if include is not None and (not isinstance(include, (list, tuple, set, frozenset))):
raise TypeError('include should be a list, tuple or set') # depends on [control=['if'], data=[]]
if exclude is not None and (not isinstance(exclude, (list, tuple, set, frozenset))):
raise TypeError('exclude should be a list, tuple or set') # depends on [control=['if'], data=[]]
values = {}
for prop in self._properties.itervalues():
name = prop._code_name
if include is not None and name not in include:
continue # depends on [control=['if'], data=[]]
if exclude is not None and name in exclude:
continue # depends on [control=['if'], data=[]]
try:
values[name] = prop._get_for_dict(self) # depends on [control=['try'], data=[]]
except UnprojectedPropertyError:
pass # Ignore unprojected properties rather than failing. # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['prop']]
return values |
def parse_string(contents, name='tmp.cpp', **kwargs):
""" Parse a string of C/C++ code
"""
idx = clang.cindex.Index.create()
tu = idx.parse(name, unsaved_files=[(name, contents)], **kwargs)
return _ensure_parse_valid(tu) | def function[parse_string, parameter[contents, name]]:
constant[ Parse a string of C/C++ code
]
variable[idx] assign[=] call[name[clang].cindex.Index.create, parameter[]]
variable[tu] assign[=] call[name[idx].parse, parameter[name[name]]]
return[call[name[_ensure_parse_valid], parameter[name[tu]]]] | keyword[def] identifier[parse_string] ( identifier[contents] , identifier[name] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[idx] = identifier[clang] . identifier[cindex] . identifier[Index] . identifier[create] ()
identifier[tu] = identifier[idx] . identifier[parse] ( identifier[name] , identifier[unsaved_files] =[( identifier[name] , identifier[contents] )],** identifier[kwargs] )
keyword[return] identifier[_ensure_parse_valid] ( identifier[tu] ) | def parse_string(contents, name='tmp.cpp', **kwargs):
""" Parse a string of C/C++ code
"""
idx = clang.cindex.Index.create()
tu = idx.parse(name, unsaved_files=[(name, contents)], **kwargs)
return _ensure_parse_valid(tu) |
def var(self):
"""
Variance value as a result of an uncertainty calculation
"""
mn = self.mean
vr = np.mean((self._mcpts - mn) ** 2)
return vr | def function[var, parameter[self]]:
constant[
Variance value as a result of an uncertainty calculation
]
variable[mn] assign[=] name[self].mean
variable[vr] assign[=] call[name[np].mean, parameter[binary_operation[binary_operation[name[self]._mcpts - name[mn]] ** constant[2]]]]
return[name[vr]] | keyword[def] identifier[var] ( identifier[self] ):
literal[string]
identifier[mn] = identifier[self] . identifier[mean]
identifier[vr] = identifier[np] . identifier[mean] (( identifier[self] . identifier[_mcpts] - identifier[mn] )** literal[int] )
keyword[return] identifier[vr] | def var(self):
"""
Variance value as a result of an uncertainty calculation
"""
mn = self.mean
vr = np.mean((self._mcpts - mn) ** 2)
return vr |
def doaniscorr(PmagSpecRec, AniSpec):
"""
takes the 6 element 's' vector and the Dec,Inc, Int 'Dir' data,
performs simple anisotropy correction. returns corrected Dec, Inc, Int
"""
AniSpecRec = {}
for key in list(PmagSpecRec.keys()):
AniSpecRec[key] = PmagSpecRec[key]
Dir = np.zeros((3), 'f')
Dir[0] = float(PmagSpecRec["specimen_dec"])
Dir[1] = float(PmagSpecRec["specimen_inc"])
Dir[2] = float(PmagSpecRec["specimen_int"])
# check if F test passes! if anisotropy_sigma available
chi, chi_inv = check_F(AniSpec)
if chi[0][0] == 1.: # isotropic
cDir = [Dir[0], Dir[1]] # no change
newint = Dir[2]
else:
X = dir2cart(Dir)
M = np.array(X)
H = np.dot(M, chi_inv)
cDir = cart2dir(H)
Hunit = [old_div(H[0], cDir[2]), old_div(H[1], cDir[2]), old_div(
H[2], cDir[2])] # unit vector parallel to Banc
Zunit = [0, 0, -1.] # unit vector parallel to lab field
Hpar = np.dot(chi, Hunit) # unit vector applied along ancient field
Zpar = np.dot(chi, Zunit) # unit vector applied along lab field
# intensity of resultant vector from ancient field
HparInt = cart2dir(Hpar)[2]
# intensity of resultant vector from lab field
ZparInt = cart2dir(Zpar)[2]
newint = Dir[2] * ZparInt / HparInt
if cDir[0] - Dir[0] > 90:
cDir[1] = -cDir[1]
cDir[0] = (cDir[0] - 180.) % 360.
AniSpecRec["specimen_dec"] = '%7.1f' % (cDir[0])
AniSpecRec["specimen_inc"] = '%7.1f' % (cDir[1])
AniSpecRec["specimen_int"] = '%9.4e' % (newint)
AniSpecRec["specimen_correction"] = 'c'
if 'magic_method_codes' in list(AniSpecRec.keys()):
methcodes = AniSpecRec["magic_method_codes"]
else:
methcodes = ""
if methcodes == "":
methcodes = "DA-AC-" + AniSpec['anisotropy_type']
if methcodes != "":
methcodes = methcodes + ":DA-AC-" + AniSpec['anisotropy_type']
if chi[0][0] == 1.: # isotropic
# indicates anisotropy was checked and no change necessary
methcodes = methcodes + ':DA-AC-ISO'
AniSpecRec["magic_method_codes"] = methcodes.strip(":")
return AniSpecRec | def function[doaniscorr, parameter[PmagSpecRec, AniSpec]]:
constant[
takes the 6 element 's' vector and the Dec,Inc, Int 'Dir' data,
performs simple anisotropy correction. returns corrected Dec, Inc, Int
]
variable[AniSpecRec] assign[=] dictionary[[], []]
for taget[name[key]] in starred[call[name[list], parameter[call[name[PmagSpecRec].keys, parameter[]]]]] begin[:]
call[name[AniSpecRec]][name[key]] assign[=] call[name[PmagSpecRec]][name[key]]
variable[Dir] assign[=] call[name[np].zeros, parameter[constant[3], constant[f]]]
call[name[Dir]][constant[0]] assign[=] call[name[float], parameter[call[name[PmagSpecRec]][constant[specimen_dec]]]]
call[name[Dir]][constant[1]] assign[=] call[name[float], parameter[call[name[PmagSpecRec]][constant[specimen_inc]]]]
call[name[Dir]][constant[2]] assign[=] call[name[float], parameter[call[name[PmagSpecRec]][constant[specimen_int]]]]
<ast.Tuple object at 0x7da2047e9d20> assign[=] call[name[check_F], parameter[name[AniSpec]]]
if compare[call[call[name[chi]][constant[0]]][constant[0]] equal[==] constant[1.0]] begin[:]
variable[cDir] assign[=] list[[<ast.Subscript object at 0x7da2047e8400>, <ast.Subscript object at 0x7da1b047cb50>]]
variable[newint] assign[=] call[name[Dir]][constant[2]]
call[name[AniSpecRec]][constant[specimen_dec]] assign[=] binary_operation[constant[%7.1f] <ast.Mod object at 0x7da2590d6920> call[name[cDir]][constant[0]]]
call[name[AniSpecRec]][constant[specimen_inc]] assign[=] binary_operation[constant[%7.1f] <ast.Mod object at 0x7da2590d6920> call[name[cDir]][constant[1]]]
call[name[AniSpecRec]][constant[specimen_int]] assign[=] binary_operation[constant[%9.4e] <ast.Mod object at 0x7da2590d6920> name[newint]]
call[name[AniSpecRec]][constant[specimen_correction]] assign[=] constant[c]
if compare[constant[magic_method_codes] in call[name[list], parameter[call[name[AniSpecRec].keys, parameter[]]]]] begin[:]
variable[methcodes] assign[=] call[name[AniSpecRec]][constant[magic_method_codes]]
if compare[name[methcodes] equal[==] constant[]] begin[:]
variable[methcodes] assign[=] binary_operation[constant[DA-AC-] + call[name[AniSpec]][constant[anisotropy_type]]]
if compare[name[methcodes] not_equal[!=] constant[]] begin[:]
variable[methcodes] assign[=] binary_operation[binary_operation[name[methcodes] + constant[:DA-AC-]] + call[name[AniSpec]][constant[anisotropy_type]]]
if compare[call[call[name[chi]][constant[0]]][constant[0]] equal[==] constant[1.0]] begin[:]
variable[methcodes] assign[=] binary_operation[name[methcodes] + constant[:DA-AC-ISO]]
call[name[AniSpecRec]][constant[magic_method_codes]] assign[=] call[name[methcodes].strip, parameter[constant[:]]]
return[name[AniSpecRec]] | keyword[def] identifier[doaniscorr] ( identifier[PmagSpecRec] , identifier[AniSpec] ):
literal[string]
identifier[AniSpecRec] ={}
keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[PmagSpecRec] . identifier[keys] ()):
identifier[AniSpecRec] [ identifier[key] ]= identifier[PmagSpecRec] [ identifier[key] ]
identifier[Dir] = identifier[np] . identifier[zeros] (( literal[int] ), literal[string] )
identifier[Dir] [ literal[int] ]= identifier[float] ( identifier[PmagSpecRec] [ literal[string] ])
identifier[Dir] [ literal[int] ]= identifier[float] ( identifier[PmagSpecRec] [ literal[string] ])
identifier[Dir] [ literal[int] ]= identifier[float] ( identifier[PmagSpecRec] [ literal[string] ])
identifier[chi] , identifier[chi_inv] = identifier[check_F] ( identifier[AniSpec] )
keyword[if] identifier[chi] [ literal[int] ][ literal[int] ]== literal[int] :
identifier[cDir] =[ identifier[Dir] [ literal[int] ], identifier[Dir] [ literal[int] ]]
identifier[newint] = identifier[Dir] [ literal[int] ]
keyword[else] :
identifier[X] = identifier[dir2cart] ( identifier[Dir] )
identifier[M] = identifier[np] . identifier[array] ( identifier[X] )
identifier[H] = identifier[np] . identifier[dot] ( identifier[M] , identifier[chi_inv] )
identifier[cDir] = identifier[cart2dir] ( identifier[H] )
identifier[Hunit] =[ identifier[old_div] ( identifier[H] [ literal[int] ], identifier[cDir] [ literal[int] ]), identifier[old_div] ( identifier[H] [ literal[int] ], identifier[cDir] [ literal[int] ]), identifier[old_div] (
identifier[H] [ literal[int] ], identifier[cDir] [ literal[int] ])]
identifier[Zunit] =[ literal[int] , literal[int] ,- literal[int] ]
identifier[Hpar] = identifier[np] . identifier[dot] ( identifier[chi] , identifier[Hunit] )
identifier[Zpar] = identifier[np] . identifier[dot] ( identifier[chi] , identifier[Zunit] )
identifier[HparInt] = identifier[cart2dir] ( identifier[Hpar] )[ literal[int] ]
identifier[ZparInt] = identifier[cart2dir] ( identifier[Zpar] )[ literal[int] ]
identifier[newint] = identifier[Dir] [ literal[int] ]* identifier[ZparInt] / identifier[HparInt]
keyword[if] identifier[cDir] [ literal[int] ]- identifier[Dir] [ literal[int] ]> literal[int] :
identifier[cDir] [ literal[int] ]=- identifier[cDir] [ literal[int] ]
identifier[cDir] [ literal[int] ]=( identifier[cDir] [ literal[int] ]- literal[int] )% literal[int]
identifier[AniSpecRec] [ literal[string] ]= literal[string] %( identifier[cDir] [ literal[int] ])
identifier[AniSpecRec] [ literal[string] ]= literal[string] %( identifier[cDir] [ literal[int] ])
identifier[AniSpecRec] [ literal[string] ]= literal[string] %( identifier[newint] )
identifier[AniSpecRec] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[AniSpecRec] . identifier[keys] ()):
identifier[methcodes] = identifier[AniSpecRec] [ literal[string] ]
keyword[else] :
identifier[methcodes] = literal[string]
keyword[if] identifier[methcodes] == literal[string] :
identifier[methcodes] = literal[string] + identifier[AniSpec] [ literal[string] ]
keyword[if] identifier[methcodes] != literal[string] :
identifier[methcodes] = identifier[methcodes] + literal[string] + identifier[AniSpec] [ literal[string] ]
keyword[if] identifier[chi] [ literal[int] ][ literal[int] ]== literal[int] :
identifier[methcodes] = identifier[methcodes] + literal[string]
identifier[AniSpecRec] [ literal[string] ]= identifier[methcodes] . identifier[strip] ( literal[string] )
keyword[return] identifier[AniSpecRec] | def doaniscorr(PmagSpecRec, AniSpec):
"""
takes the 6 element 's' vector and the Dec,Inc, Int 'Dir' data,
performs simple anisotropy correction. returns corrected Dec, Inc, Int
"""
AniSpecRec = {}
for key in list(PmagSpecRec.keys()):
AniSpecRec[key] = PmagSpecRec[key] # depends on [control=['for'], data=['key']]
Dir = np.zeros(3, 'f')
Dir[0] = float(PmagSpecRec['specimen_dec'])
Dir[1] = float(PmagSpecRec['specimen_inc'])
Dir[2] = float(PmagSpecRec['specimen_int'])
# check if F test passes! if anisotropy_sigma available
(chi, chi_inv) = check_F(AniSpec)
if chi[0][0] == 1.0: # isotropic
cDir = [Dir[0], Dir[1]] # no change
newint = Dir[2] # depends on [control=['if'], data=[]]
else:
X = dir2cart(Dir)
M = np.array(X)
H = np.dot(M, chi_inv)
cDir = cart2dir(H)
Hunit = [old_div(H[0], cDir[2]), old_div(H[1], cDir[2]), old_div(H[2], cDir[2])] # unit vector parallel to Banc
Zunit = [0, 0, -1.0] # unit vector parallel to lab field
Hpar = np.dot(chi, Hunit) # unit vector applied along ancient field
Zpar = np.dot(chi, Zunit) # unit vector applied along lab field
# intensity of resultant vector from ancient field
HparInt = cart2dir(Hpar)[2]
# intensity of resultant vector from lab field
ZparInt = cart2dir(Zpar)[2]
newint = Dir[2] * ZparInt / HparInt
if cDir[0] - Dir[0] > 90:
cDir[1] = -cDir[1]
cDir[0] = (cDir[0] - 180.0) % 360.0 # depends on [control=['if'], data=[]]
AniSpecRec['specimen_dec'] = '%7.1f' % cDir[0]
AniSpecRec['specimen_inc'] = '%7.1f' % cDir[1]
AniSpecRec['specimen_int'] = '%9.4e' % newint
AniSpecRec['specimen_correction'] = 'c'
if 'magic_method_codes' in list(AniSpecRec.keys()):
methcodes = AniSpecRec['magic_method_codes'] # depends on [control=['if'], data=[]]
else:
methcodes = ''
if methcodes == '':
methcodes = 'DA-AC-' + AniSpec['anisotropy_type'] # depends on [control=['if'], data=['methcodes']]
if methcodes != '':
methcodes = methcodes + ':DA-AC-' + AniSpec['anisotropy_type'] # depends on [control=['if'], data=['methcodes']]
if chi[0][0] == 1.0: # isotropic
# indicates anisotropy was checked and no change necessary
methcodes = methcodes + ':DA-AC-ISO' # depends on [control=['if'], data=[]]
AniSpecRec['magic_method_codes'] = methcodes.strip(':')
return AniSpecRec |
def parse_JSON(self, JSON_string):
"""
Parses a *pyowm.stationsapi30.measurement.AggregatedMeasurement*
instance out of raw JSON data.
:param JSON_string: a raw JSON string
:type JSON_string: str
:return: a *pyowm.stationsapi30.measurement.AggregatedMeasurement*
instance or ``None`` if no data is available
:raises: *ParseResponseError* if it is impossible to find or parse the
data needed to build the result
"""
if JSON_string is None:
raise parse_response_error.ParseResponseError('JSON data is None')
d = json.loads(JSON_string)
station_id = d.get('station_id', None)
ts = d.get('date', None)
if ts is not None:
ts = int(ts)
aggregated_on = d.get('type', None)
temp = d.get('temp', dict())
humidity = d.get('humidity', dict())
wind = d.get('wind', dict())
pressure = d.get('pressure', dict())
precipitation = d.get('precipitation', dict())
return AggregatedMeasurement(station_id, ts, aggregated_on, temp=temp,
humidity=humidity, wind=wind,
pressure=pressure, precipitation=precipitation) | def function[parse_JSON, parameter[self, JSON_string]]:
constant[
Parses a *pyowm.stationsapi30.measurement.AggregatedMeasurement*
instance out of raw JSON data.
:param JSON_string: a raw JSON string
:type JSON_string: str
:return: a *pyowm.stationsapi30.measurement.AggregatedMeasurement*
instance or ``None`` if no data is available
:raises: *ParseResponseError* if it is impossible to find or parse the
data needed to build the result
]
if compare[name[JSON_string] is constant[None]] begin[:]
<ast.Raise object at 0x7da2044c0280>
variable[d] assign[=] call[name[json].loads, parameter[name[JSON_string]]]
variable[station_id] assign[=] call[name[d].get, parameter[constant[station_id], constant[None]]]
variable[ts] assign[=] call[name[d].get, parameter[constant[date], constant[None]]]
if compare[name[ts] is_not constant[None]] begin[:]
variable[ts] assign[=] call[name[int], parameter[name[ts]]]
variable[aggregated_on] assign[=] call[name[d].get, parameter[constant[type], constant[None]]]
variable[temp] assign[=] call[name[d].get, parameter[constant[temp], call[name[dict], parameter[]]]]
variable[humidity] assign[=] call[name[d].get, parameter[constant[humidity], call[name[dict], parameter[]]]]
variable[wind] assign[=] call[name[d].get, parameter[constant[wind], call[name[dict], parameter[]]]]
variable[pressure] assign[=] call[name[d].get, parameter[constant[pressure], call[name[dict], parameter[]]]]
variable[precipitation] assign[=] call[name[d].get, parameter[constant[precipitation], call[name[dict], parameter[]]]]
return[call[name[AggregatedMeasurement], parameter[name[station_id], name[ts], name[aggregated_on]]]] | keyword[def] identifier[parse_JSON] ( identifier[self] , identifier[JSON_string] ):
literal[string]
keyword[if] identifier[JSON_string] keyword[is] keyword[None] :
keyword[raise] identifier[parse_response_error] . identifier[ParseResponseError] ( literal[string] )
identifier[d] = identifier[json] . identifier[loads] ( identifier[JSON_string] )
identifier[station_id] = identifier[d] . identifier[get] ( literal[string] , keyword[None] )
identifier[ts] = identifier[d] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[ts] keyword[is] keyword[not] keyword[None] :
identifier[ts] = identifier[int] ( identifier[ts] )
identifier[aggregated_on] = identifier[d] . identifier[get] ( literal[string] , keyword[None] )
identifier[temp] = identifier[d] . identifier[get] ( literal[string] , identifier[dict] ())
identifier[humidity] = identifier[d] . identifier[get] ( literal[string] , identifier[dict] ())
identifier[wind] = identifier[d] . identifier[get] ( literal[string] , identifier[dict] ())
identifier[pressure] = identifier[d] . identifier[get] ( literal[string] , identifier[dict] ())
identifier[precipitation] = identifier[d] . identifier[get] ( literal[string] , identifier[dict] ())
keyword[return] identifier[AggregatedMeasurement] ( identifier[station_id] , identifier[ts] , identifier[aggregated_on] , identifier[temp] = identifier[temp] ,
identifier[humidity] = identifier[humidity] , identifier[wind] = identifier[wind] ,
identifier[pressure] = identifier[pressure] , identifier[precipitation] = identifier[precipitation] ) | def parse_JSON(self, JSON_string):
"""
Parses a *pyowm.stationsapi30.measurement.AggregatedMeasurement*
instance out of raw JSON data.
:param JSON_string: a raw JSON string
:type JSON_string: str
:return: a *pyowm.stationsapi30.measurement.AggregatedMeasurement*
instance or ``None`` if no data is available
:raises: *ParseResponseError* if it is impossible to find or parse the
data needed to build the result
"""
if JSON_string is None:
raise parse_response_error.ParseResponseError('JSON data is None') # depends on [control=['if'], data=[]]
d = json.loads(JSON_string)
station_id = d.get('station_id', None)
ts = d.get('date', None)
if ts is not None:
ts = int(ts) # depends on [control=['if'], data=['ts']]
aggregated_on = d.get('type', None)
temp = d.get('temp', dict())
humidity = d.get('humidity', dict())
wind = d.get('wind', dict())
pressure = d.get('pressure', dict())
precipitation = d.get('precipitation', dict())
return AggregatedMeasurement(station_id, ts, aggregated_on, temp=temp, humidity=humidity, wind=wind, pressure=pressure, precipitation=precipitation) |
def forward(*args, **kwargs):
"""Similar to :meth:`invoke` but fills in default keyword
arguments from the current context if the other command expects
it. This cannot invoke callbacks directly, only other commands.
"""
self, cmd = args[:2]
# It's also possible to invoke another command which might or
# might not have a callback.
if not isinstance(cmd, Command):
raise TypeError('Callback is not a command.')
for param in self.params:
if param not in kwargs:
kwargs[param] = self.params[param]
return self.invoke(cmd, **kwargs) | def function[forward, parameter[]]:
constant[Similar to :meth:`invoke` but fills in default keyword
arguments from the current context if the other command expects
it. This cannot invoke callbacks directly, only other commands.
]
<ast.Tuple object at 0x7da2041daec0> assign[=] call[name[args]][<ast.Slice object at 0x7da2041da530>]
if <ast.UnaryOp object at 0x7da2041db8e0> begin[:]
<ast.Raise object at 0x7da2041d89a0>
for taget[name[param]] in starred[name[self].params] begin[:]
if compare[name[param] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[kwargs]][name[param]] assign[=] call[name[self].params][name[param]]
return[call[name[self].invoke, parameter[name[cmd]]]] | keyword[def] identifier[forward] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] , identifier[cmd] = identifier[args] [: literal[int] ]
keyword[if] keyword[not] identifier[isinstance] ( identifier[cmd] , identifier[Command] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[for] identifier[param] keyword[in] identifier[self] . identifier[params] :
keyword[if] identifier[param] keyword[not] keyword[in] identifier[kwargs] :
identifier[kwargs] [ identifier[param] ]= identifier[self] . identifier[params] [ identifier[param] ]
keyword[return] identifier[self] . identifier[invoke] ( identifier[cmd] ,** identifier[kwargs] ) | def forward(*args, **kwargs):
"""Similar to :meth:`invoke` but fills in default keyword
arguments from the current context if the other command expects
it. This cannot invoke callbacks directly, only other commands.
"""
(self, cmd) = args[:2]
# It's also possible to invoke another command which might or
# might not have a callback.
if not isinstance(cmd, Command):
raise TypeError('Callback is not a command.') # depends on [control=['if'], data=[]]
for param in self.params:
if param not in kwargs:
kwargs[param] = self.params[param] # depends on [control=['if'], data=['param', 'kwargs']] # depends on [control=['for'], data=['param']]
return self.invoke(cmd, **kwargs) |
def get_schedule_distribution(schedule, global_step=None):
"""Computes the pmf of a schedule given the global_step.
Args:
schedule: A schedule tuple, see encode_schedule for details.
global_step: A scalar tensor, the step to query the schedule.
Returns:
A 1-D tensor of probs, the sampling distribution of the global_step.
"""
interpolation, steps, pmfs = schedule
if len(pmfs) == 1:
# py_func doesn't seem to work on TPU - at least get the constant case to
# run.
# TODO(noam): get the general case working.
return pmfs[0]
if global_step is None:
global_step = tf.train.get_or_create_global_step()
if interpolation == 'step':
interpolation_fn = step_interpolation
elif interpolation == 'linear':
interpolation_fn = linear_interpolation
else:
raise ValueError('Invalid interpolation strategy: %s' % interpolation)
return tf.reshape(
tf.py_func(
func=lambda x: interpolation_fn(x, np.array(steps), np.array(pmfs)),
inp=[global_step], Tout=tf.float32), [len(pmfs[0])]) | def function[get_schedule_distribution, parameter[schedule, global_step]]:
constant[Computes the pmf of a schedule given the global_step.
Args:
schedule: A schedule tuple, see encode_schedule for details.
global_step: A scalar tensor, the step to query the schedule.
Returns:
A 1-D tensor of probs, the sampling distribution of the global_step.
]
<ast.Tuple object at 0x7da1b1e15390> assign[=] name[schedule]
if compare[call[name[len], parameter[name[pmfs]]] equal[==] constant[1]] begin[:]
return[call[name[pmfs]][constant[0]]]
if compare[name[global_step] is constant[None]] begin[:]
variable[global_step] assign[=] call[name[tf].train.get_or_create_global_step, parameter[]]
if compare[name[interpolation] equal[==] constant[step]] begin[:]
variable[interpolation_fn] assign[=] name[step_interpolation]
return[call[name[tf].reshape, parameter[call[name[tf].py_func, parameter[]], list[[<ast.Call object at 0x7da2047e8be0>]]]]] | keyword[def] identifier[get_schedule_distribution] ( identifier[schedule] , identifier[global_step] = keyword[None] ):
literal[string]
identifier[interpolation] , identifier[steps] , identifier[pmfs] = identifier[schedule]
keyword[if] identifier[len] ( identifier[pmfs] )== literal[int] :
keyword[return] identifier[pmfs] [ literal[int] ]
keyword[if] identifier[global_step] keyword[is] keyword[None] :
identifier[global_step] = identifier[tf] . identifier[train] . identifier[get_or_create_global_step] ()
keyword[if] identifier[interpolation] == literal[string] :
identifier[interpolation_fn] = identifier[step_interpolation]
keyword[elif] identifier[interpolation] == literal[string] :
identifier[interpolation_fn] = identifier[linear_interpolation]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[interpolation] )
keyword[return] identifier[tf] . identifier[reshape] (
identifier[tf] . identifier[py_func] (
identifier[func] = keyword[lambda] identifier[x] : identifier[interpolation_fn] ( identifier[x] , identifier[np] . identifier[array] ( identifier[steps] ), identifier[np] . identifier[array] ( identifier[pmfs] )),
identifier[inp] =[ identifier[global_step] ], identifier[Tout] = identifier[tf] . identifier[float32] ),[ identifier[len] ( identifier[pmfs] [ literal[int] ])]) | def get_schedule_distribution(schedule, global_step=None):
"""Computes the pmf of a schedule given the global_step.
Args:
schedule: A schedule tuple, see encode_schedule for details.
global_step: A scalar tensor, the step to query the schedule.
Returns:
A 1-D tensor of probs, the sampling distribution of the global_step.
"""
(interpolation, steps, pmfs) = schedule
if len(pmfs) == 1:
# py_func doesn't seem to work on TPU - at least get the constant case to
# run.
# TODO(noam): get the general case working.
return pmfs[0] # depends on [control=['if'], data=[]]
if global_step is None:
global_step = tf.train.get_or_create_global_step() # depends on [control=['if'], data=['global_step']]
if interpolation == 'step':
interpolation_fn = step_interpolation # depends on [control=['if'], data=[]]
elif interpolation == 'linear':
interpolation_fn = linear_interpolation # depends on [control=['if'], data=[]]
else:
raise ValueError('Invalid interpolation strategy: %s' % interpolation)
return tf.reshape(tf.py_func(func=lambda x: interpolation_fn(x, np.array(steps), np.array(pmfs)), inp=[global_step], Tout=tf.float32), [len(pmfs[0])]) |
def grid_arange(bounds, step):
"""
Return a grid from an (2,dimension) bounds with samples step distance apart.
Parameters
---------
bounds: (2,dimension) list of [[min x, min y, etc], [max x, max y, etc]]
step: float, or (dimension) floats, separation between points
Returns
-------
grid: (n, dimension), points inside the specified bounds
"""
bounds = np.asanyarray(bounds, dtype=np.float64)
if len(bounds) != 2:
raise ValueError('bounds must be (2, dimension!')
# allow single float or per-dimension spacing
step = np.asanyarray(step, dtype=np.float64)
if step.shape == ():
step = np.tile(step, bounds.shape[1])
grid_elements = [np.arange(*b, step=s) for b, s in zip(bounds.T, step)]
grid = np.vstack(np.meshgrid(*grid_elements)
).reshape(bounds.shape[1], -1).T
return grid | def function[grid_arange, parameter[bounds, step]]:
constant[
Return a grid from an (2,dimension) bounds with samples step distance apart.
Parameters
---------
bounds: (2,dimension) list of [[min x, min y, etc], [max x, max y, etc]]
step: float, or (dimension) floats, separation between points
Returns
-------
grid: (n, dimension), points inside the specified bounds
]
variable[bounds] assign[=] call[name[np].asanyarray, parameter[name[bounds]]]
if compare[call[name[len], parameter[name[bounds]]] not_equal[!=] constant[2]] begin[:]
<ast.Raise object at 0x7da18f811180>
variable[step] assign[=] call[name[np].asanyarray, parameter[name[step]]]
if compare[name[step].shape equal[==] tuple[[]]] begin[:]
variable[step] assign[=] call[name[np].tile, parameter[name[step], call[name[bounds].shape][constant[1]]]]
variable[grid_elements] assign[=] <ast.ListComp object at 0x7da1b2389870>
variable[grid] assign[=] call[call[name[np].vstack, parameter[call[name[np].meshgrid, parameter[<ast.Starred object at 0x7da2044c0bb0>]]]].reshape, parameter[call[name[bounds].shape][constant[1]], <ast.UnaryOp object at 0x7da2044c1cc0>]].T
return[name[grid]] | keyword[def] identifier[grid_arange] ( identifier[bounds] , identifier[step] ):
literal[string]
identifier[bounds] = identifier[np] . identifier[asanyarray] ( identifier[bounds] , identifier[dtype] = identifier[np] . identifier[float64] )
keyword[if] identifier[len] ( identifier[bounds] )!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[step] = identifier[np] . identifier[asanyarray] ( identifier[step] , identifier[dtype] = identifier[np] . identifier[float64] )
keyword[if] identifier[step] . identifier[shape] ==():
identifier[step] = identifier[np] . identifier[tile] ( identifier[step] , identifier[bounds] . identifier[shape] [ literal[int] ])
identifier[grid_elements] =[ identifier[np] . identifier[arange] (* identifier[b] , identifier[step] = identifier[s] ) keyword[for] identifier[b] , identifier[s] keyword[in] identifier[zip] ( identifier[bounds] . identifier[T] , identifier[step] )]
identifier[grid] = identifier[np] . identifier[vstack] ( identifier[np] . identifier[meshgrid] (* identifier[grid_elements] )
). identifier[reshape] ( identifier[bounds] . identifier[shape] [ literal[int] ],- literal[int] ). identifier[T]
keyword[return] identifier[grid] | def grid_arange(bounds, step):
"""
Return a grid from an (2,dimension) bounds with samples step distance apart.
Parameters
---------
bounds: (2,dimension) list of [[min x, min y, etc], [max x, max y, etc]]
step: float, or (dimension) floats, separation between points
Returns
-------
grid: (n, dimension), points inside the specified bounds
"""
bounds = np.asanyarray(bounds, dtype=np.float64)
if len(bounds) != 2:
raise ValueError('bounds must be (2, dimension!') # depends on [control=['if'], data=[]]
# allow single float or per-dimension spacing
step = np.asanyarray(step, dtype=np.float64)
if step.shape == ():
step = np.tile(step, bounds.shape[1]) # depends on [control=['if'], data=[]]
grid_elements = [np.arange(*b, step=s) for (b, s) in zip(bounds.T, step)]
grid = np.vstack(np.meshgrid(*grid_elements)).reshape(bounds.shape[1], -1).T
return grid |
def get_all_resource_attributes_in_network(attr_id, network_id, **kwargs):
"""
Find every resource attribute in the network matching the supplied attr_id
"""
user_id = kwargs.get('user_id')
try:
a = db.DBSession.query(Attr).filter(Attr.id == attr_id).one()
except NoResultFound:
raise HydraError("Attribute %s not found"%(attr_id,))
ra_qry = db.DBSession.query(ResourceAttr).filter(
ResourceAttr.attr_id==attr_id,
or_(Network.id == network_id,
Node.network_id==network_id,
Link.network_id==network_id,
ResourceGroup.network_id==network_id)
).outerjoin('node')\
.outerjoin('link')\
.outerjoin('network')\
.outerjoin('resourcegroup')\
.options(joinedload_all('node'))\
.options(joinedload_all('link'))\
.options(joinedload_all('resourcegroup'))\
.options(joinedload_all('network'))
resourceattrs = ra_qry.all()
json_ra = []
#Load the metadata too
for ra in resourceattrs:
ra_j = JSONObject(ra, extras={'node':JSONObject(ra.node) if ra.node else None,
'link':JSONObject(ra.link) if ra.link else None,
'resourcegroup':JSONObject(ra.resourcegroup) if ra.resourcegroup else None,
'network':JSONObject(ra.network) if ra.network else None})
if ra_j.node is not None:
ra_j.resource = ra_j.node
elif ra_j.link is not None:
ra_j.resource = ra_j.link
elif ra_j.resourcegroup is not None:
ra_j.resource = ra_j.resourcegroup
elif ra.network is not None:
ra_j.resource = ra_j.network
json_ra.append(ra_j)
return json_ra | def function[get_all_resource_attributes_in_network, parameter[attr_id, network_id]]:
constant[
Find every resource attribute in the network matching the supplied attr_id
]
variable[user_id] assign[=] call[name[kwargs].get, parameter[constant[user_id]]]
<ast.Try object at 0x7da20cabc460>
variable[ra_qry] assign[=] call[call[call[call[call[call[call[call[call[call[name[db].DBSession.query, parameter[name[ResourceAttr]]].filter, parameter[compare[name[ResourceAttr].attr_id equal[==] name[attr_id]], call[name[or_], parameter[compare[name[Network].id equal[==] name[network_id]], compare[name[Node].network_id equal[==] name[network_id]], compare[name[Link].network_id equal[==] name[network_id]], compare[name[ResourceGroup].network_id equal[==] name[network_id]]]]]].outerjoin, parameter[constant[node]]].outerjoin, parameter[constant[link]]].outerjoin, parameter[constant[network]]].outerjoin, parameter[constant[resourcegroup]]].options, parameter[call[name[joinedload_all], parameter[constant[node]]]]].options, parameter[call[name[joinedload_all], parameter[constant[link]]]]].options, parameter[call[name[joinedload_all], parameter[constant[resourcegroup]]]]].options, parameter[call[name[joinedload_all], parameter[constant[network]]]]]
variable[resourceattrs] assign[=] call[name[ra_qry].all, parameter[]]
variable[json_ra] assign[=] list[[]]
for taget[name[ra]] in starred[name[resourceattrs]] begin[:]
variable[ra_j] assign[=] call[name[JSONObject], parameter[name[ra]]]
if compare[name[ra_j].node is_not constant[None]] begin[:]
name[ra_j].resource assign[=] name[ra_j].node
call[name[json_ra].append, parameter[name[ra_j]]]
return[name[json_ra]] | keyword[def] identifier[get_all_resource_attributes_in_network] ( identifier[attr_id] , identifier[network_id] ,** identifier[kwargs] ):
literal[string]
identifier[user_id] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[try] :
identifier[a] = identifier[db] . identifier[DBSession] . identifier[query] ( identifier[Attr] ). identifier[filter] ( identifier[Attr] . identifier[id] == identifier[attr_id] ). identifier[one] ()
keyword[except] identifier[NoResultFound] :
keyword[raise] identifier[HydraError] ( literal[string] %( identifier[attr_id] ,))
identifier[ra_qry] = identifier[db] . identifier[DBSession] . identifier[query] ( identifier[ResourceAttr] ). identifier[filter] (
identifier[ResourceAttr] . identifier[attr_id] == identifier[attr_id] ,
identifier[or_] ( identifier[Network] . identifier[id] == identifier[network_id] ,
identifier[Node] . identifier[network_id] == identifier[network_id] ,
identifier[Link] . identifier[network_id] == identifier[network_id] ,
identifier[ResourceGroup] . identifier[network_id] == identifier[network_id] )
). identifier[outerjoin] ( literal[string] ). identifier[outerjoin] ( literal[string] ). identifier[outerjoin] ( literal[string] ). identifier[outerjoin] ( literal[string] ). identifier[options] ( identifier[joinedload_all] ( literal[string] )). identifier[options] ( identifier[joinedload_all] ( literal[string] )). identifier[options] ( identifier[joinedload_all] ( literal[string] )). identifier[options] ( identifier[joinedload_all] ( literal[string] ))
identifier[resourceattrs] = identifier[ra_qry] . identifier[all] ()
identifier[json_ra] =[]
keyword[for] identifier[ra] keyword[in] identifier[resourceattrs] :
identifier[ra_j] = identifier[JSONObject] ( identifier[ra] , identifier[extras] ={ literal[string] : identifier[JSONObject] ( identifier[ra] . identifier[node] ) keyword[if] identifier[ra] . identifier[node] keyword[else] keyword[None] ,
literal[string] : identifier[JSONObject] ( identifier[ra] . identifier[link] ) keyword[if] identifier[ra] . identifier[link] keyword[else] keyword[None] ,
literal[string] : identifier[JSONObject] ( identifier[ra] . identifier[resourcegroup] ) keyword[if] identifier[ra] . identifier[resourcegroup] keyword[else] keyword[None] ,
literal[string] : identifier[JSONObject] ( identifier[ra] . identifier[network] ) keyword[if] identifier[ra] . identifier[network] keyword[else] keyword[None] })
keyword[if] identifier[ra_j] . identifier[node] keyword[is] keyword[not] keyword[None] :
identifier[ra_j] . identifier[resource] = identifier[ra_j] . identifier[node]
keyword[elif] identifier[ra_j] . identifier[link] keyword[is] keyword[not] keyword[None] :
identifier[ra_j] . identifier[resource] = identifier[ra_j] . identifier[link]
keyword[elif] identifier[ra_j] . identifier[resourcegroup] keyword[is] keyword[not] keyword[None] :
identifier[ra_j] . identifier[resource] = identifier[ra_j] . identifier[resourcegroup]
keyword[elif] identifier[ra] . identifier[network] keyword[is] keyword[not] keyword[None] :
identifier[ra_j] . identifier[resource] = identifier[ra_j] . identifier[network]
identifier[json_ra] . identifier[append] ( identifier[ra_j] )
keyword[return] identifier[json_ra] | def get_all_resource_attributes_in_network(attr_id, network_id, **kwargs):
"""
Find every resource attribute in the network matching the supplied attr_id
"""
user_id = kwargs.get('user_id')
try:
a = db.DBSession.query(Attr).filter(Attr.id == attr_id).one() # depends on [control=['try'], data=[]]
except NoResultFound:
raise HydraError('Attribute %s not found' % (attr_id,)) # depends on [control=['except'], data=[]]
ra_qry = db.DBSession.query(ResourceAttr).filter(ResourceAttr.attr_id == attr_id, or_(Network.id == network_id, Node.network_id == network_id, Link.network_id == network_id, ResourceGroup.network_id == network_id)).outerjoin('node').outerjoin('link').outerjoin('network').outerjoin('resourcegroup').options(joinedload_all('node')).options(joinedload_all('link')).options(joinedload_all('resourcegroup')).options(joinedload_all('network'))
resourceattrs = ra_qry.all()
json_ra = []
#Load the metadata too
for ra in resourceattrs:
ra_j = JSONObject(ra, extras={'node': JSONObject(ra.node) if ra.node else None, 'link': JSONObject(ra.link) if ra.link else None, 'resourcegroup': JSONObject(ra.resourcegroup) if ra.resourcegroup else None, 'network': JSONObject(ra.network) if ra.network else None})
if ra_j.node is not None:
ra_j.resource = ra_j.node # depends on [control=['if'], data=[]]
elif ra_j.link is not None:
ra_j.resource = ra_j.link # depends on [control=['if'], data=[]]
elif ra_j.resourcegroup is not None:
ra_j.resource = ra_j.resourcegroup # depends on [control=['if'], data=[]]
elif ra.network is not None:
ra_j.resource = ra_j.network # depends on [control=['if'], data=[]]
json_ra.append(ra_j) # depends on [control=['for'], data=['ra']]
return json_ra |
def sanitize(self):
'''
Check if the current settings conform to the LISP specifications and
fix them where possible.
'''
# We override the MapRegisterMessage sa
super(InfoMessage, self).sanitize()
# R: R bit indicates this is a reply to an Info-Request (Info-
# Reply). R bit is set to 0 in an Info-Request. When R bit is set
# to 0, the AFI field (following the EID-prefix field) must be set
# to 0. When R bit is set to 1, the packet contents follow the
# format for an Info-Reply as described below.
if not isinstance(self.is_reply, bool):
raise ValueError('Is-reply flag must be a boolean')
# Nonce: An 8-byte random value created by the sender of the Info-
# Request. This nonce will be returned in the Info-Reply. The
# nonce SHOULD be generated by a properly seeded pseudo-random (or
# strong random) source.
if len(bytes(self.nonce)) != 8:
raise ValueError('Invalid nonce')
# Key ID: A configured ID to find the configured Message
# Authentication Code (MAC) algorithm and key value used for the
# authentication function. See Section 14.4 for codepoint
# assignments.
if self.key_id not in (KEY_ID_NONE, KEY_ID_HMAC_SHA_1_96,
KEY_ID_HMAC_SHA_256_128):
raise ValueError('Invalid Key ID')
# Authentication Data: The message digest used from the output of the
# Message Authentication Code (MAC) algorithm. The entire Map-
# Register payload is authenticated with this field preset to 0.
# After the MAC is computed, it is placed in this field.
# Implementations of this specification MUST include support for
# HMAC-SHA-1-96 [RFC2404] and support for HMAC-SHA-256-128 [RFC6234]
# is RECOMMENDED.
if not isinstance(self.authentication_data, bytes):
raise ValueError('Invalid authentication data')
# TTL: The time in minutes the recipient of the Info-Reply will
# store the RTR Information.
if not isinstance(self.ttl, numbers.Integral) \
or self.ttl < 0 or self.ttl > 0xffffffff:
raise ValueError('Invalid TTL')
# EID-prefix: 4 octets if an IPv4 address-family, 16 octets if an IPv6
# address-family.
if not isinstance(self.eid_prefix, (IPv4Network, IPv6Network)):
raise ValueError('EID prefix must be IPv4 or IPv6')
# When a Map-Server receives an Info-Request message, it responds with
# an Info-Reply message. The Info-Reply message source port is 4342,
# and destination port is taken from the source port of the triggering
# Info-Request. Map-Server fills the NAT LCAF (LCAF Type = 7) fields
# according to their description. The Map-Server uses AFI=0 for the
# Private ETR RLOC Address field in the NAT LCAF.
if self.is_reply:
if not isinstance(self.reply, LCAFNATTraversalAddress):
raise ValueError("An InfoMessage which is an Info-Reply must contain an LCAFNATTraversalAddress")
else:
if self.reply is not None:
raise ValueError("An InfoMessage which is an Info-Request can not contain a reply") | def function[sanitize, parameter[self]]:
constant[
Check if the current settings conform to the LISP specifications and
fix them where possible.
]
call[call[name[super], parameter[name[InfoMessage], name[self]]].sanitize, parameter[]]
if <ast.UnaryOp object at 0x7da20e954f40> begin[:]
<ast.Raise object at 0x7da20e956950>
if compare[call[name[len], parameter[call[name[bytes], parameter[name[self].nonce]]]] not_equal[!=] constant[8]] begin[:]
<ast.Raise object at 0x7da20e954250>
if compare[name[self].key_id <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Name object at 0x7da1b0924070>, <ast.Name object at 0x7da1b0927ca0>, <ast.Name object at 0x7da1b09257e0>]]] begin[:]
<ast.Raise object at 0x7da1b09260e0>
if <ast.UnaryOp object at 0x7da1b0925840> begin[:]
<ast.Raise object at 0x7da1b09274c0>
if <ast.BoolOp object at 0x7da1b09269e0> begin[:]
<ast.Raise object at 0x7da1b0925540>
if <ast.UnaryOp object at 0x7da1b0926ec0> begin[:]
<ast.Raise object at 0x7da2047eb850>
if name[self].is_reply begin[:]
if <ast.UnaryOp object at 0x7da204621060> begin[:]
<ast.Raise object at 0x7da204622350> | keyword[def] identifier[sanitize] ( identifier[self] ):
literal[string]
identifier[super] ( identifier[InfoMessage] , identifier[self] ). identifier[sanitize] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[is_reply] , identifier[bool] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[len] ( identifier[bytes] ( identifier[self] . identifier[nonce] ))!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[self] . identifier[key_id] keyword[not] keyword[in] ( identifier[KEY_ID_NONE] , identifier[KEY_ID_HMAC_SHA_1_96] ,
identifier[KEY_ID_HMAC_SHA_256_128] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[authentication_data] , identifier[bytes] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[ttl] , identifier[numbers] . identifier[Integral] ) keyword[or] identifier[self] . identifier[ttl] < literal[int] keyword[or] identifier[self] . identifier[ttl] > literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[eid_prefix] ,( identifier[IPv4Network] , identifier[IPv6Network] )):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[self] . identifier[is_reply] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[reply] , identifier[LCAFNATTraversalAddress] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[else] :
keyword[if] identifier[self] . identifier[reply] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def sanitize(self):
"""
Check if the current settings conform to the LISP specifications and
fix them where possible.
"""
# We override the MapRegisterMessage sa
super(InfoMessage, self).sanitize()
# R: R bit indicates this is a reply to an Info-Request (Info-
# Reply). R bit is set to 0 in an Info-Request. When R bit is set
# to 0, the AFI field (following the EID-prefix field) must be set
# to 0. When R bit is set to 1, the packet contents follow the
# format for an Info-Reply as described below.
if not isinstance(self.is_reply, bool):
raise ValueError('Is-reply flag must be a boolean') # depends on [control=['if'], data=[]]
# Nonce: An 8-byte random value created by the sender of the Info-
# Request. This nonce will be returned in the Info-Reply. The
# nonce SHOULD be generated by a properly seeded pseudo-random (or
# strong random) source.
if len(bytes(self.nonce)) != 8:
raise ValueError('Invalid nonce') # depends on [control=['if'], data=[]]
# Key ID: A configured ID to find the configured Message
# Authentication Code (MAC) algorithm and key value used for the
# authentication function. See Section 14.4 for codepoint
# assignments.
if self.key_id not in (KEY_ID_NONE, KEY_ID_HMAC_SHA_1_96, KEY_ID_HMAC_SHA_256_128):
raise ValueError('Invalid Key ID') # depends on [control=['if'], data=[]]
# Authentication Data: The message digest used from the output of the
# Message Authentication Code (MAC) algorithm. The entire Map-
# Register payload is authenticated with this field preset to 0.
# After the MAC is computed, it is placed in this field.
# Implementations of this specification MUST include support for
# HMAC-SHA-1-96 [RFC2404] and support for HMAC-SHA-256-128 [RFC6234]
# is RECOMMENDED.
if not isinstance(self.authentication_data, bytes):
raise ValueError('Invalid authentication data') # depends on [control=['if'], data=[]]
# TTL: The time in minutes the recipient of the Info-Reply will
# store the RTR Information.
if not isinstance(self.ttl, numbers.Integral) or self.ttl < 0 or self.ttl > 4294967295:
raise ValueError('Invalid TTL') # depends on [control=['if'], data=[]]
# EID-prefix: 4 octets if an IPv4 address-family, 16 octets if an IPv6
# address-family.
if not isinstance(self.eid_prefix, (IPv4Network, IPv6Network)):
raise ValueError('EID prefix must be IPv4 or IPv6') # depends on [control=['if'], data=[]]
# When a Map-Server receives an Info-Request message, it responds with
# an Info-Reply message. The Info-Reply message source port is 4342,
# and destination port is taken from the source port of the triggering
# Info-Request. Map-Server fills the NAT LCAF (LCAF Type = 7) fields
# according to their description. The Map-Server uses AFI=0 for the
# Private ETR RLOC Address field in the NAT LCAF.
if self.is_reply:
if not isinstance(self.reply, LCAFNATTraversalAddress):
raise ValueError('An InfoMessage which is an Info-Reply must contain an LCAFNATTraversalAddress') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif self.reply is not None:
raise ValueError('An InfoMessage which is an Info-Request can not contain a reply') # depends on [control=['if'], data=[]] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.