code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def _deserialize_from_store(profile):
"""
Takes data from the store and integrates into the application.
"""
# we first serialize to avoid deserialization merge conflicts
_serialize_into_store(profile)
fk_cache = {}
with transaction.atomic():
syncable_dict = _profile_models[profile]
excluded_list = []
# iterate through classes which are in foreign key dependency order
for model_name, klass_model in six.iteritems(syncable_dict):
# handle cases where a class has a single FK reference to itself
self_ref_fk = _self_referential_fk(klass_model)
query = Q(model_name=klass_model.morango_model_name)
for klass in klass_model.morango_model_dependencies:
query |= Q(model_name=klass.morango_model_name)
if self_ref_fk:
clean_parents = Store.objects.filter(dirty_bit=False, profile=profile).filter(query).char_ids_list()
dirty_children = Store.objects.filter(dirty_bit=True, profile=profile) \
.filter(Q(_self_ref_fk__in=clean_parents) | Q(_self_ref_fk='')).filter(query)
# keep iterating until size of dirty_children is 0
while len(dirty_children) > 0:
for store_model in dirty_children:
try:
app_model = store_model._deserialize_store_model(fk_cache)
if app_model:
with mute_signals(signals.pre_save, signals.post_save):
app_model.save(update_dirty_bit_to=False)
# we update a store model after we have deserialized it to be able to mark it as a clean parent
store_model.dirty_bit = False
store_model.save(update_fields=['dirty_bit'])
except exceptions.ValidationError:
# if the app model did not validate, we leave the store dirty bit set
excluded_list.append(store_model.id)
# update lists with new clean parents and dirty children
clean_parents = Store.objects.filter(dirty_bit=False, profile=profile).filter(query).char_ids_list()
dirty_children = Store.objects.filter(dirty_bit=True, profile=profile, _self_ref_fk__in=clean_parents).filter(query)
else:
# array for holding db values from the fields of each model for this class
db_values = []
fields = klass_model._meta.fields
for store_model in Store.objects.filter(model_name=model_name, profile=profile, dirty_bit=True):
try:
app_model = store_model._deserialize_store_model(fk_cache)
# if the model was not deleted add its field values to the list
if app_model:
for f in fields:
value = getattr(app_model, f.attname)
db_value = f.get_db_prep_value(value, connection)
db_values.append(db_value)
except exceptions.ValidationError:
# if the app model did not validate, we leave the store dirty bit set
excluded_list.append(store_model.id)
if db_values:
# number of rows to update
num_of_rows = len(db_values) // len(fields)
# create '%s' placeholders for a single row
placeholder_tuple = tuple(['%s' for _ in range(len(fields))])
# create list of the '%s' tuple placeholders based on number of rows to update
placeholder_list = [str(placeholder_tuple) for _ in range(num_of_rows)]
with connection.cursor() as cursor:
DBBackend._bulk_insert_into_app_models(cursor, klass_model._meta.db_table, fields, db_values, placeholder_list)
# clear dirty bit for all store models for this profile except for models that did not validate
Store.objects.exclude(id__in=excluded_list).filter(profile=profile, dirty_bit=True).update(dirty_bit=False)
|
def function[_deserialize_from_store, parameter[profile]]:
constant[
Takes data from the store and integrates into the application.
]
call[name[_serialize_into_store], parameter[name[profile]]]
variable[fk_cache] assign[=] dictionary[[], []]
with call[name[transaction].atomic, parameter[]] begin[:]
variable[syncable_dict] assign[=] call[name[_profile_models]][name[profile]]
variable[excluded_list] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b003d4e0>, <ast.Name object at 0x7da1b003d510>]]] in starred[call[name[six].iteritems, parameter[name[syncable_dict]]]] begin[:]
variable[self_ref_fk] assign[=] call[name[_self_referential_fk], parameter[name[klass_model]]]
variable[query] assign[=] call[name[Q], parameter[]]
for taget[name[klass]] in starred[name[klass_model].morango_model_dependencies] begin[:]
<ast.AugAssign object at 0x7da1b003fb20>
if name[self_ref_fk] begin[:]
variable[clean_parents] assign[=] call[call[call[name[Store].objects.filter, parameter[]].filter, parameter[name[query]]].char_ids_list, parameter[]]
variable[dirty_children] assign[=] call[call[call[name[Store].objects.filter, parameter[]].filter, parameter[binary_operation[call[name[Q], parameter[]] <ast.BitOr object at 0x7da2590d6aa0> call[name[Q], parameter[]]]]].filter, parameter[name[query]]]
while compare[call[name[len], parameter[name[dirty_children]]] greater[>] constant[0]] begin[:]
for taget[name[store_model]] in starred[name[dirty_children]] begin[:]
<ast.Try object at 0x7da1b003c880>
variable[clean_parents] assign[=] call[call[call[name[Store].objects.filter, parameter[]].filter, parameter[name[query]]].char_ids_list, parameter[]]
variable[dirty_children] assign[=] call[call[name[Store].objects.filter, parameter[]].filter, parameter[name[query]]]
call[call[call[name[Store].objects.exclude, parameter[]].filter, parameter[]].update, parameter[]]
|
keyword[def] identifier[_deserialize_from_store] ( identifier[profile] ):
literal[string]
identifier[_serialize_into_store] ( identifier[profile] )
identifier[fk_cache] ={}
keyword[with] identifier[transaction] . identifier[atomic] ():
identifier[syncable_dict] = identifier[_profile_models] [ identifier[profile] ]
identifier[excluded_list] =[]
keyword[for] identifier[model_name] , identifier[klass_model] keyword[in] identifier[six] . identifier[iteritems] ( identifier[syncable_dict] ):
identifier[self_ref_fk] = identifier[_self_referential_fk] ( identifier[klass_model] )
identifier[query] = identifier[Q] ( identifier[model_name] = identifier[klass_model] . identifier[morango_model_name] )
keyword[for] identifier[klass] keyword[in] identifier[klass_model] . identifier[morango_model_dependencies] :
identifier[query] |= identifier[Q] ( identifier[model_name] = identifier[klass] . identifier[morango_model_name] )
keyword[if] identifier[self_ref_fk] :
identifier[clean_parents] = identifier[Store] . identifier[objects] . identifier[filter] ( identifier[dirty_bit] = keyword[False] , identifier[profile] = identifier[profile] ). identifier[filter] ( identifier[query] ). identifier[char_ids_list] ()
identifier[dirty_children] = identifier[Store] . identifier[objects] . identifier[filter] ( identifier[dirty_bit] = keyword[True] , identifier[profile] = identifier[profile] ). identifier[filter] ( identifier[Q] ( identifier[_self_ref_fk__in] = identifier[clean_parents] )| identifier[Q] ( identifier[_self_ref_fk] = literal[string] )). identifier[filter] ( identifier[query] )
keyword[while] identifier[len] ( identifier[dirty_children] )> literal[int] :
keyword[for] identifier[store_model] keyword[in] identifier[dirty_children] :
keyword[try] :
identifier[app_model] = identifier[store_model] . identifier[_deserialize_store_model] ( identifier[fk_cache] )
keyword[if] identifier[app_model] :
keyword[with] identifier[mute_signals] ( identifier[signals] . identifier[pre_save] , identifier[signals] . identifier[post_save] ):
identifier[app_model] . identifier[save] ( identifier[update_dirty_bit_to] = keyword[False] )
identifier[store_model] . identifier[dirty_bit] = keyword[False]
identifier[store_model] . identifier[save] ( identifier[update_fields] =[ literal[string] ])
keyword[except] identifier[exceptions] . identifier[ValidationError] :
identifier[excluded_list] . identifier[append] ( identifier[store_model] . identifier[id] )
identifier[clean_parents] = identifier[Store] . identifier[objects] . identifier[filter] ( identifier[dirty_bit] = keyword[False] , identifier[profile] = identifier[profile] ). identifier[filter] ( identifier[query] ). identifier[char_ids_list] ()
identifier[dirty_children] = identifier[Store] . identifier[objects] . identifier[filter] ( identifier[dirty_bit] = keyword[True] , identifier[profile] = identifier[profile] , identifier[_self_ref_fk__in] = identifier[clean_parents] ). identifier[filter] ( identifier[query] )
keyword[else] :
identifier[db_values] =[]
identifier[fields] = identifier[klass_model] . identifier[_meta] . identifier[fields]
keyword[for] identifier[store_model] keyword[in] identifier[Store] . identifier[objects] . identifier[filter] ( identifier[model_name] = identifier[model_name] , identifier[profile] = identifier[profile] , identifier[dirty_bit] = keyword[True] ):
keyword[try] :
identifier[app_model] = identifier[store_model] . identifier[_deserialize_store_model] ( identifier[fk_cache] )
keyword[if] identifier[app_model] :
keyword[for] identifier[f] keyword[in] identifier[fields] :
identifier[value] = identifier[getattr] ( identifier[app_model] , identifier[f] . identifier[attname] )
identifier[db_value] = identifier[f] . identifier[get_db_prep_value] ( identifier[value] , identifier[connection] )
identifier[db_values] . identifier[append] ( identifier[db_value] )
keyword[except] identifier[exceptions] . identifier[ValidationError] :
identifier[excluded_list] . identifier[append] ( identifier[store_model] . identifier[id] )
keyword[if] identifier[db_values] :
identifier[num_of_rows] = identifier[len] ( identifier[db_values] )// identifier[len] ( identifier[fields] )
identifier[placeholder_tuple] = identifier[tuple] ([ literal[string] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[len] ( identifier[fields] ))])
identifier[placeholder_list] =[ identifier[str] ( identifier[placeholder_tuple] ) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[num_of_rows] )]
keyword[with] identifier[connection] . identifier[cursor] () keyword[as] identifier[cursor] :
identifier[DBBackend] . identifier[_bulk_insert_into_app_models] ( identifier[cursor] , identifier[klass_model] . identifier[_meta] . identifier[db_table] , identifier[fields] , identifier[db_values] , identifier[placeholder_list] )
identifier[Store] . identifier[objects] . identifier[exclude] ( identifier[id__in] = identifier[excluded_list] ). identifier[filter] ( identifier[profile] = identifier[profile] , identifier[dirty_bit] = keyword[True] ). identifier[update] ( identifier[dirty_bit] = keyword[False] )
|
def _deserialize_from_store(profile):
"""
Takes data from the store and integrates into the application.
"""
# we first serialize to avoid deserialization merge conflicts
_serialize_into_store(profile)
fk_cache = {}
with transaction.atomic():
syncable_dict = _profile_models[profile]
excluded_list = []
# iterate through classes which are in foreign key dependency order
for (model_name, klass_model) in six.iteritems(syncable_dict):
# handle cases where a class has a single FK reference to itself
self_ref_fk = _self_referential_fk(klass_model)
query = Q(model_name=klass_model.morango_model_name)
for klass in klass_model.morango_model_dependencies:
query |= Q(model_name=klass.morango_model_name) # depends on [control=['for'], data=['klass']]
if self_ref_fk:
clean_parents = Store.objects.filter(dirty_bit=False, profile=profile).filter(query).char_ids_list()
dirty_children = Store.objects.filter(dirty_bit=True, profile=profile).filter(Q(_self_ref_fk__in=clean_parents) | Q(_self_ref_fk='')).filter(query)
# keep iterating until size of dirty_children is 0
while len(dirty_children) > 0:
for store_model in dirty_children:
try:
app_model = store_model._deserialize_store_model(fk_cache)
if app_model:
with mute_signals(signals.pre_save, signals.post_save):
app_model.save(update_dirty_bit_to=False) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
# we update a store model after we have deserialized it to be able to mark it as a clean parent
store_model.dirty_bit = False
store_model.save(update_fields=['dirty_bit']) # depends on [control=['try'], data=[]]
except exceptions.ValidationError:
# if the app model did not validate, we leave the store dirty bit set
excluded_list.append(store_model.id) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['store_model']]
# update lists with new clean parents and dirty children
clean_parents = Store.objects.filter(dirty_bit=False, profile=profile).filter(query).char_ids_list()
dirty_children = Store.objects.filter(dirty_bit=True, profile=profile, _self_ref_fk__in=clean_parents).filter(query) # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
else:
# array for holding db values from the fields of each model for this class
db_values = []
fields = klass_model._meta.fields
for store_model in Store.objects.filter(model_name=model_name, profile=profile, dirty_bit=True):
try:
app_model = store_model._deserialize_store_model(fk_cache)
# if the model was not deleted add its field values to the list
if app_model:
for f in fields:
value = getattr(app_model, f.attname)
db_value = f.get_db_prep_value(value, connection)
db_values.append(db_value) # depends on [control=['for'], data=['f']] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except exceptions.ValidationError:
# if the app model did not validate, we leave the store dirty bit set
excluded_list.append(store_model.id) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['store_model']]
if db_values:
# number of rows to update
num_of_rows = len(db_values) // len(fields)
# create '%s' placeholders for a single row
placeholder_tuple = tuple(['%s' for _ in range(len(fields))])
# create list of the '%s' tuple placeholders based on number of rows to update
placeholder_list = [str(placeholder_tuple) for _ in range(num_of_rows)]
with connection.cursor() as cursor:
DBBackend._bulk_insert_into_app_models(cursor, klass_model._meta.db_table, fields, db_values, placeholder_list) # depends on [control=['with'], data=['cursor']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# clear dirty bit for all store models for this profile except for models that did not validate
Store.objects.exclude(id__in=excluded_list).filter(profile=profile, dirty_bit=True).update(dirty_bit=False) # depends on [control=['with'], data=[]]
|
def rmtree(path):
"""Remove the given recursively.
:note: we use shutil rmtree but adjust its behaviour to see whether files that
couldn't be deleted are read-only. Windows will not remove them in that case"""
def onerror(func, path, exc_info):
# Is the error an access error ?
os.chmod(path, stat.S_IWUSR)
try:
func(path) # Will scream if still not possible to delete.
except Exception as ex:
if HIDE_WINDOWS_KNOWN_ERRORS:
raise SkipTest("FIXME: fails with: PermissionError\n %s", ex)
else:
raise
return shutil.rmtree(path, False, onerror)
|
def function[rmtree, parameter[path]]:
constant[Remove the given recursively.
:note: we use shutil rmtree but adjust its behaviour to see whether files that
couldn't be deleted are read-only. Windows will not remove them in that case]
def function[onerror, parameter[func, path, exc_info]]:
call[name[os].chmod, parameter[name[path], name[stat].S_IWUSR]]
<ast.Try object at 0x7da1b235ae30>
return[call[name[shutil].rmtree, parameter[name[path], constant[False], name[onerror]]]]
|
keyword[def] identifier[rmtree] ( identifier[path] ):
literal[string]
keyword[def] identifier[onerror] ( identifier[func] , identifier[path] , identifier[exc_info] ):
identifier[os] . identifier[chmod] ( identifier[path] , identifier[stat] . identifier[S_IWUSR] )
keyword[try] :
identifier[func] ( identifier[path] )
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
keyword[if] identifier[HIDE_WINDOWS_KNOWN_ERRORS] :
keyword[raise] identifier[SkipTest] ( literal[string] , identifier[ex] )
keyword[else] :
keyword[raise]
keyword[return] identifier[shutil] . identifier[rmtree] ( identifier[path] , keyword[False] , identifier[onerror] )
|
def rmtree(path):
"""Remove the given recursively.
:note: we use shutil rmtree but adjust its behaviour to see whether files that
couldn't be deleted are read-only. Windows will not remove them in that case"""
def onerror(func, path, exc_info):
# Is the error an access error ?
os.chmod(path, stat.S_IWUSR)
try:
func(path) # Will scream if still not possible to delete. # depends on [control=['try'], data=[]]
except Exception as ex:
if HIDE_WINDOWS_KNOWN_ERRORS:
raise SkipTest('FIXME: fails with: PermissionError\n %s', ex) # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['ex']]
return shutil.rmtree(path, False, onerror)
|
def get_module_class(self):
"""Return the module and class as a tuple of the given class in the
initializer.
:param reload: if ``True`` then reload the module before returning the
class
"""
pkg, cname = self.parse_module_class()
logger.debug(f'pkg: {pkg}, class: {cname}')
pkg = pkg.split('.')
mod = reduce(lambda m, n: getattr(m, n), pkg[1:], __import__(pkg[0]))
logger.debug(f'mod: {mod}')
if self.reload:
importlib.reload(mod)
cls = getattr(mod, cname)
logger.debug(f'class: {cls}')
return mod, cls
|
def function[get_module_class, parameter[self]]:
constant[Return the module and class as a tuple of the given class in the
initializer.
:param reload: if ``True`` then reload the module before returning the
class
]
<ast.Tuple object at 0x7da1b0f42770> assign[=] call[name[self].parse_module_class, parameter[]]
call[name[logger].debug, parameter[<ast.JoinedStr object at 0x7da1b11e0eb0>]]
variable[pkg] assign[=] call[name[pkg].split, parameter[constant[.]]]
variable[mod] assign[=] call[name[reduce], parameter[<ast.Lambda object at 0x7da1b11e2170>, call[name[pkg]][<ast.Slice object at 0x7da1b11e1810>], call[name[__import__], parameter[call[name[pkg]][constant[0]]]]]]
call[name[logger].debug, parameter[<ast.JoinedStr object at 0x7da1b11e2590>]]
if name[self].reload begin[:]
call[name[importlib].reload, parameter[name[mod]]]
variable[cls] assign[=] call[name[getattr], parameter[name[mod], name[cname]]]
call[name[logger].debug, parameter[<ast.JoinedStr object at 0x7da1b11e0250>]]
return[tuple[[<ast.Name object at 0x7da1b11e0340>, <ast.Name object at 0x7da1b11e0c70>]]]
|
keyword[def] identifier[get_module_class] ( identifier[self] ):
literal[string]
identifier[pkg] , identifier[cname] = identifier[self] . identifier[parse_module_class] ()
identifier[logger] . identifier[debug] ( literal[string] )
identifier[pkg] = identifier[pkg] . identifier[split] ( literal[string] )
identifier[mod] = identifier[reduce] ( keyword[lambda] identifier[m] , identifier[n] : identifier[getattr] ( identifier[m] , identifier[n] ), identifier[pkg] [ literal[int] :], identifier[__import__] ( identifier[pkg] [ literal[int] ]))
identifier[logger] . identifier[debug] ( literal[string] )
keyword[if] identifier[self] . identifier[reload] :
identifier[importlib] . identifier[reload] ( identifier[mod] )
identifier[cls] = identifier[getattr] ( identifier[mod] , identifier[cname] )
identifier[logger] . identifier[debug] ( literal[string] )
keyword[return] identifier[mod] , identifier[cls]
|
def get_module_class(self):
"""Return the module and class as a tuple of the given class in the
initializer.
:param reload: if ``True`` then reload the module before returning the
class
"""
(pkg, cname) = self.parse_module_class()
logger.debug(f'pkg: {pkg}, class: {cname}')
pkg = pkg.split('.')
mod = reduce(lambda m, n: getattr(m, n), pkg[1:], __import__(pkg[0]))
logger.debug(f'mod: {mod}')
if self.reload:
importlib.reload(mod) # depends on [control=['if'], data=[]]
cls = getattr(mod, cname)
logger.debug(f'class: {cls}')
return (mod, cls)
|
def get_learning_rate(self, iter):
'''
Get learning rate with exponential decay based on current iteration.
Args:
iter (int): Current iteration (starting with 0).
Returns:
float: Learning rate
'''
lr = self.scheduler.get_learning_rate(iter)
if iter < self.warmup_iter:
lr *= (iter + 1) * 1.0 / self.warmup_iter
return lr
|
def function[get_learning_rate, parameter[self, iter]]:
constant[
Get learning rate with exponential decay based on current iteration.
Args:
iter (int): Current iteration (starting with 0).
Returns:
float: Learning rate
]
variable[lr] assign[=] call[name[self].scheduler.get_learning_rate, parameter[name[iter]]]
if compare[name[iter] less[<] name[self].warmup_iter] begin[:]
<ast.AugAssign object at 0x7da204567850>
return[name[lr]]
|
keyword[def] identifier[get_learning_rate] ( identifier[self] , identifier[iter] ):
literal[string]
identifier[lr] = identifier[self] . identifier[scheduler] . identifier[get_learning_rate] ( identifier[iter] )
keyword[if] identifier[iter] < identifier[self] . identifier[warmup_iter] :
identifier[lr] *=( identifier[iter] + literal[int] )* literal[int] / identifier[self] . identifier[warmup_iter]
keyword[return] identifier[lr]
|
def get_learning_rate(self, iter):
"""
Get learning rate with exponential decay based on current iteration.
Args:
iter (int): Current iteration (starting with 0).
Returns:
float: Learning rate
"""
lr = self.scheduler.get_learning_rate(iter)
if iter < self.warmup_iter:
lr *= (iter + 1) * 1.0 / self.warmup_iter # depends on [control=['if'], data=['iter']]
return lr
|
def decode_body(cls, header, f):
"""Generates a `MqttSubscribe` packet given a
`MqttFixedHeader`. This method asserts that header.packet_type
is `subscribe`.
Parameters
----------
header: MqttFixedHeader
f: file
Object with a read method.
Raises
------
DecodeError
When there are extra bytes at the end of the packet.
Returns
-------
int
Number of bytes consumed from ``f``.
MqttSubscribe
Object extracted from ``f``.
"""
assert header.packet_type == MqttControlPacketType.subscribe
decoder = mqtt_io.FileDecoder(mqtt_io.LimitReader(f, header.remaining_len))
packet_id, = decoder.unpack(mqtt_io.FIELD_PACKET_ID)
topics = []
while header.remaining_len > decoder.num_bytes_consumed:
num_str_bytes, name = decoder.unpack_utf8()
max_qos, = decoder.unpack(mqtt_io.FIELD_U8)
try:
sub_topic = MqttTopic(name, max_qos)
except ValueError:
raise DecodeError('Invalid QOS {}'.format(max_qos))
topics.append(sub_topic)
assert header.remaining_len == decoder.num_bytes_consumed
return decoder.num_bytes_consumed, MqttSubscribe(packet_id, topics)
|
def function[decode_body, parameter[cls, header, f]]:
constant[Generates a `MqttSubscribe` packet given a
`MqttFixedHeader`. This method asserts that header.packet_type
is `subscribe`.
Parameters
----------
header: MqttFixedHeader
f: file
Object with a read method.
Raises
------
DecodeError
When there are extra bytes at the end of the packet.
Returns
-------
int
Number of bytes consumed from ``f``.
MqttSubscribe
Object extracted from ``f``.
]
assert[compare[name[header].packet_type equal[==] name[MqttControlPacketType].subscribe]]
variable[decoder] assign[=] call[name[mqtt_io].FileDecoder, parameter[call[name[mqtt_io].LimitReader, parameter[name[f], name[header].remaining_len]]]]
<ast.Tuple object at 0x7da207f00a00> assign[=] call[name[decoder].unpack, parameter[name[mqtt_io].FIELD_PACKET_ID]]
variable[topics] assign[=] list[[]]
while compare[name[header].remaining_len greater[>] name[decoder].num_bytes_consumed] begin[:]
<ast.Tuple object at 0x7da207f028f0> assign[=] call[name[decoder].unpack_utf8, parameter[]]
<ast.Tuple object at 0x7da207f01f00> assign[=] call[name[decoder].unpack, parameter[name[mqtt_io].FIELD_U8]]
<ast.Try object at 0x7da207f03f70>
call[name[topics].append, parameter[name[sub_topic]]]
assert[compare[name[header].remaining_len equal[==] name[decoder].num_bytes_consumed]]
return[tuple[[<ast.Attribute object at 0x7da20e9b3160>, <ast.Call object at 0x7da20e9b0dc0>]]]
|
keyword[def] identifier[decode_body] ( identifier[cls] , identifier[header] , identifier[f] ):
literal[string]
keyword[assert] identifier[header] . identifier[packet_type] == identifier[MqttControlPacketType] . identifier[subscribe]
identifier[decoder] = identifier[mqtt_io] . identifier[FileDecoder] ( identifier[mqtt_io] . identifier[LimitReader] ( identifier[f] , identifier[header] . identifier[remaining_len] ))
identifier[packet_id] ,= identifier[decoder] . identifier[unpack] ( identifier[mqtt_io] . identifier[FIELD_PACKET_ID] )
identifier[topics] =[]
keyword[while] identifier[header] . identifier[remaining_len] > identifier[decoder] . identifier[num_bytes_consumed] :
identifier[num_str_bytes] , identifier[name] = identifier[decoder] . identifier[unpack_utf8] ()
identifier[max_qos] ,= identifier[decoder] . identifier[unpack] ( identifier[mqtt_io] . identifier[FIELD_U8] )
keyword[try] :
identifier[sub_topic] = identifier[MqttTopic] ( identifier[name] , identifier[max_qos] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[DecodeError] ( literal[string] . identifier[format] ( identifier[max_qos] ))
identifier[topics] . identifier[append] ( identifier[sub_topic] )
keyword[assert] identifier[header] . identifier[remaining_len] == identifier[decoder] . identifier[num_bytes_consumed]
keyword[return] identifier[decoder] . identifier[num_bytes_consumed] , identifier[MqttSubscribe] ( identifier[packet_id] , identifier[topics] )
|
def decode_body(cls, header, f):
"""Generates a `MqttSubscribe` packet given a
`MqttFixedHeader`. This method asserts that header.packet_type
is `subscribe`.
Parameters
----------
header: MqttFixedHeader
f: file
Object with a read method.
Raises
------
DecodeError
When there are extra bytes at the end of the packet.
Returns
-------
int
Number of bytes consumed from ``f``.
MqttSubscribe
Object extracted from ``f``.
"""
assert header.packet_type == MqttControlPacketType.subscribe
decoder = mqtt_io.FileDecoder(mqtt_io.LimitReader(f, header.remaining_len))
(packet_id,) = decoder.unpack(mqtt_io.FIELD_PACKET_ID)
topics = []
while header.remaining_len > decoder.num_bytes_consumed:
(num_str_bytes, name) = decoder.unpack_utf8()
(max_qos,) = decoder.unpack(mqtt_io.FIELD_U8)
try:
sub_topic = MqttTopic(name, max_qos) # depends on [control=['try'], data=[]]
except ValueError:
raise DecodeError('Invalid QOS {}'.format(max_qos)) # depends on [control=['except'], data=[]]
topics.append(sub_topic) # depends on [control=['while'], data=[]]
assert header.remaining_len == decoder.num_bytes_consumed
return (decoder.num_bytes_consumed, MqttSubscribe(packet_id, topics))
|
def _generate_splice(self, slice_ind):
'''
Creates a splice size version of the ZeroList
'''
step_size = slice_ind.step if slice_ind.step else 1
# Check for each of the four possible scenarios
if slice_ind.start != None:
if slice_ind.stop != None:
newListLen = ((get_non_negative_index(slice_ind.stop, self._length) -
get_non_negative_index(slice_ind.start, self._length)) // step_size)
else:
newListLen = ((self._length -
get_non_negative_index(slice_ind.start, self._length)) // step_size)
else:
if slice_ind.stop != None:
newListLen = ((get_non_negative_index(slice_ind.stop, self._length)) // step_size)
else:
newListLen = (self._length // step_size)
return ZeroList(newListLen)
|
def function[_generate_splice, parameter[self, slice_ind]]:
constant[
Creates a splice size version of the ZeroList
]
variable[step_size] assign[=] <ast.IfExp object at 0x7da1b15f1cc0>
if compare[name[slice_ind].start not_equal[!=] constant[None]] begin[:]
if compare[name[slice_ind].stop not_equal[!=] constant[None]] begin[:]
variable[newListLen] assign[=] binary_operation[binary_operation[call[name[get_non_negative_index], parameter[name[slice_ind].stop, name[self]._length]] - call[name[get_non_negative_index], parameter[name[slice_ind].start, name[self]._length]]] <ast.FloorDiv object at 0x7da2590d6bc0> name[step_size]]
return[call[name[ZeroList], parameter[name[newListLen]]]]
|
keyword[def] identifier[_generate_splice] ( identifier[self] , identifier[slice_ind] ):
literal[string]
identifier[step_size] = identifier[slice_ind] . identifier[step] keyword[if] identifier[slice_ind] . identifier[step] keyword[else] literal[int]
keyword[if] identifier[slice_ind] . identifier[start] != keyword[None] :
keyword[if] identifier[slice_ind] . identifier[stop] != keyword[None] :
identifier[newListLen] =(( identifier[get_non_negative_index] ( identifier[slice_ind] . identifier[stop] , identifier[self] . identifier[_length] )-
identifier[get_non_negative_index] ( identifier[slice_ind] . identifier[start] , identifier[self] . identifier[_length] ))// identifier[step_size] )
keyword[else] :
identifier[newListLen] =(( identifier[self] . identifier[_length] -
identifier[get_non_negative_index] ( identifier[slice_ind] . identifier[start] , identifier[self] . identifier[_length] ))// identifier[step_size] )
keyword[else] :
keyword[if] identifier[slice_ind] . identifier[stop] != keyword[None] :
identifier[newListLen] =(( identifier[get_non_negative_index] ( identifier[slice_ind] . identifier[stop] , identifier[self] . identifier[_length] ))// identifier[step_size] )
keyword[else] :
identifier[newListLen] =( identifier[self] . identifier[_length] // identifier[step_size] )
keyword[return] identifier[ZeroList] ( identifier[newListLen] )
|
def _generate_splice(self, slice_ind):
"""
Creates a splice size version of the ZeroList
"""
step_size = slice_ind.step if slice_ind.step else 1
# Check for each of the four possible scenarios
if slice_ind.start != None:
if slice_ind.stop != None:
newListLen = (get_non_negative_index(slice_ind.stop, self._length) - get_non_negative_index(slice_ind.start, self._length)) // step_size # depends on [control=['if'], data=[]]
else:
newListLen = (self._length - get_non_negative_index(slice_ind.start, self._length)) // step_size # depends on [control=['if'], data=[]]
elif slice_ind.stop != None:
newListLen = get_non_negative_index(slice_ind.stop, self._length) // step_size # depends on [control=['if'], data=[]]
else:
newListLen = self._length // step_size
return ZeroList(newListLen)
|
def register_config(self, cls, name):
"""
register a configuration class
:param cls:
:param name:
:return:
"""
self._configs.add(cls)
self._config_names.add(name)
|
def function[register_config, parameter[self, cls, name]]:
constant[
register a configuration class
:param cls:
:param name:
:return:
]
call[name[self]._configs.add, parameter[name[cls]]]
call[name[self]._config_names.add, parameter[name[name]]]
|
keyword[def] identifier[register_config] ( identifier[self] , identifier[cls] , identifier[name] ):
literal[string]
identifier[self] . identifier[_configs] . identifier[add] ( identifier[cls] )
identifier[self] . identifier[_config_names] . identifier[add] ( identifier[name] )
|
def register_config(self, cls, name):
"""
register a configuration class
:param cls:
:param name:
:return:
"""
self._configs.add(cls)
self._config_names.add(name)
|
def consumer(function):
"""Decorator that makes a generator function automatically advance to its
first yield point when initially called (PEP 342)."""
@wraps(function)
def wrapper(*args, **kwargs):
generator = function(*args, **kwargs)
next(generator)
return generator
return wrapper
|
def function[consumer, parameter[function]]:
constant[Decorator that makes a generator function automatically advance to its
first yield point when initially called (PEP 342).]
def function[wrapper, parameter[]]:
variable[generator] assign[=] call[name[function], parameter[<ast.Starred object at 0x7da18dc9aa10>]]
call[name[next], parameter[name[generator]]]
return[name[generator]]
return[name[wrapper]]
|
keyword[def] identifier[consumer] ( identifier[function] ):
literal[string]
@ identifier[wraps] ( identifier[function] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[generator] = identifier[function] (* identifier[args] ,** identifier[kwargs] )
identifier[next] ( identifier[generator] )
keyword[return] identifier[generator]
keyword[return] identifier[wrapper]
|
def consumer(function):
"""Decorator that makes a generator function automatically advance to its
first yield point when initially called (PEP 342)."""
@wraps(function)
def wrapper(*args, **kwargs):
generator = function(*args, **kwargs)
next(generator)
return generator
return wrapper
|
def delete(self,pool_or_cursor):
".. warning:: pgmock doesn't support delete yet, so this isn't tested"
vals=self.pkey_vals()
whereclause=' and '.join('%s=%%s'%k for k in self.PKEY.split(','))
q='delete from %s where %s'%(self.TABLE,whereclause)
commit_or_execute(pool_or_cursor,q,vals)
|
def function[delete, parameter[self, pool_or_cursor]]:
constant[.. warning:: pgmock doesn't support delete yet, so this isn't tested]
variable[vals] assign[=] call[name[self].pkey_vals, parameter[]]
variable[whereclause] assign[=] call[constant[ and ].join, parameter[<ast.GeneratorExp object at 0x7da1b13060b0>]]
variable[q] assign[=] binary_operation[constant[delete from %s where %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b15b3d30>, <ast.Name object at 0x7da1b15b3a30>]]]
call[name[commit_or_execute], parameter[name[pool_or_cursor], name[q], name[vals]]]
|
keyword[def] identifier[delete] ( identifier[self] , identifier[pool_or_cursor] ):
literal[string]
identifier[vals] = identifier[self] . identifier[pkey_vals] ()
identifier[whereclause] = literal[string] . identifier[join] ( literal[string] % identifier[k] keyword[for] identifier[k] keyword[in] identifier[self] . identifier[PKEY] . identifier[split] ( literal[string] ))
identifier[q] = literal[string] %( identifier[self] . identifier[TABLE] , identifier[whereclause] )
identifier[commit_or_execute] ( identifier[pool_or_cursor] , identifier[q] , identifier[vals] )
|
def delete(self, pool_or_cursor):
""".. warning:: pgmock doesn't support delete yet, so this isn't tested"""
vals = self.pkey_vals()
whereclause = ' and '.join(('%s=%%s' % k for k in self.PKEY.split(',')))
q = 'delete from %s where %s' % (self.TABLE, whereclause)
commit_or_execute(pool_or_cursor, q, vals)
|
def join_field(path):
"""
RETURN field SEQUENCE AS STRING
"""
output = ".".join([f.replace(".", "\\.") for f in path if f != None])
return output if output else "."
|
def function[join_field, parameter[path]]:
constant[
RETURN field SEQUENCE AS STRING
]
variable[output] assign[=] call[constant[.].join, parameter[<ast.ListComp object at 0x7da18f7204c0>]]
return[<ast.IfExp object at 0x7da18f721ae0>]
|
keyword[def] identifier[join_field] ( identifier[path] ):
literal[string]
identifier[output] = literal[string] . identifier[join] ([ identifier[f] . identifier[replace] ( literal[string] , literal[string] ) keyword[for] identifier[f] keyword[in] identifier[path] keyword[if] identifier[f] != keyword[None] ])
keyword[return] identifier[output] keyword[if] identifier[output] keyword[else] literal[string]
|
def join_field(path):
"""
RETURN field SEQUENCE AS STRING
"""
output = '.'.join([f.replace('.', '\\.') for f in path if f != None])
return output if output else '.'
|
def _post_clean(self):
"""Run password validaton after clean methods
When clean methods are run, the user instance does not yet
exist. To properly compare model values agains the password (in
the UserAttributeSimilarityValidator), we wait until we have an
instance to compare against.
https://code.djangoproject.com/ticket/28127
https://github.com/django/django/pull/8408
Has no effect in Django prior to 1.9
May become unnecessary in Django 2.0 (if this superclass changes)
"""
super()._post_clean() # updates self.instance with form data
password = self.cleaned_data.get('password1')
if password:
try:
password_validation.validate_password(password, self.instance)
except ValidationError as error:
self.add_error('password1', error)
|
def function[_post_clean, parameter[self]]:
constant[Run password validaton after clean methods
When clean methods are run, the user instance does not yet
exist. To properly compare model values agains the password (in
the UserAttributeSimilarityValidator), we wait until we have an
instance to compare against.
https://code.djangoproject.com/ticket/28127
https://github.com/django/django/pull/8408
Has no effect in Django prior to 1.9
May become unnecessary in Django 2.0 (if this superclass changes)
]
call[call[name[super], parameter[]]._post_clean, parameter[]]
variable[password] assign[=] call[name[self].cleaned_data.get, parameter[constant[password1]]]
if name[password] begin[:]
<ast.Try object at 0x7da204622080>
|
keyword[def] identifier[_post_clean] ( identifier[self] ):
literal[string]
identifier[super] (). identifier[_post_clean] ()
identifier[password] = identifier[self] . identifier[cleaned_data] . identifier[get] ( literal[string] )
keyword[if] identifier[password] :
keyword[try] :
identifier[password_validation] . identifier[validate_password] ( identifier[password] , identifier[self] . identifier[instance] )
keyword[except] identifier[ValidationError] keyword[as] identifier[error] :
identifier[self] . identifier[add_error] ( literal[string] , identifier[error] )
|
def _post_clean(self):
"""Run password validaton after clean methods
When clean methods are run, the user instance does not yet
exist. To properly compare model values agains the password (in
the UserAttributeSimilarityValidator), we wait until we have an
instance to compare against.
https://code.djangoproject.com/ticket/28127
https://github.com/django/django/pull/8408
Has no effect in Django prior to 1.9
May become unnecessary in Django 2.0 (if this superclass changes)
"""
super()._post_clean() # updates self.instance with form data
password = self.cleaned_data.get('password1')
if password:
try:
password_validation.validate_password(password, self.instance) # depends on [control=['try'], data=[]]
except ValidationError as error:
self.add_error('password1', error) # depends on [control=['except'], data=['error']] # depends on [control=['if'], data=[]]
|
def _ast_op_multiply_to_code(self, opr, ignore_whitespace=False, **kwargs):
"""Convert an AST multiply op to python source code."""
opl, opr = opr.operands
if isinstance(opl, Number):
times = opl.value
subject = self._ast_to_code(opr)
else:
times = opr.value
subject = self._ast_to_code(opl)
lines = ["repeated("]
lines.extend(self._indent(subject))
lines[-1] += ","
lines.append("{0}times={1},".format(self.indent, times))
lines.append("{0}ignore_whitespace={1}".format(self.indent, bool(ignore_whitespace)))
lines.append(")")
return lines
|
def function[_ast_op_multiply_to_code, parameter[self, opr, ignore_whitespace]]:
constant[Convert an AST multiply op to python source code.]
<ast.Tuple object at 0x7da1b01fc2b0> assign[=] name[opr].operands
if call[name[isinstance], parameter[name[opl], name[Number]]] begin[:]
variable[times] assign[=] name[opl].value
variable[subject] assign[=] call[name[self]._ast_to_code, parameter[name[opr]]]
variable[lines] assign[=] list[[<ast.Constant object at 0x7da1b01fe290>]]
call[name[lines].extend, parameter[call[name[self]._indent, parameter[name[subject]]]]]
<ast.AugAssign object at 0x7da1b01feb30>
call[name[lines].append, parameter[call[constant[{0}times={1},].format, parameter[name[self].indent, name[times]]]]]
call[name[lines].append, parameter[call[constant[{0}ignore_whitespace={1}].format, parameter[name[self].indent, call[name[bool], parameter[name[ignore_whitespace]]]]]]]
call[name[lines].append, parameter[constant[)]]]
return[name[lines]]
|
keyword[def] identifier[_ast_op_multiply_to_code] ( identifier[self] , identifier[opr] , identifier[ignore_whitespace] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[opl] , identifier[opr] = identifier[opr] . identifier[operands]
keyword[if] identifier[isinstance] ( identifier[opl] , identifier[Number] ):
identifier[times] = identifier[opl] . identifier[value]
identifier[subject] = identifier[self] . identifier[_ast_to_code] ( identifier[opr] )
keyword[else] :
identifier[times] = identifier[opr] . identifier[value]
identifier[subject] = identifier[self] . identifier[_ast_to_code] ( identifier[opl] )
identifier[lines] =[ literal[string] ]
identifier[lines] . identifier[extend] ( identifier[self] . identifier[_indent] ( identifier[subject] ))
identifier[lines] [- literal[int] ]+= literal[string]
identifier[lines] . identifier[append] ( literal[string] . identifier[format] ( identifier[self] . identifier[indent] , identifier[times] ))
identifier[lines] . identifier[append] ( literal[string] . identifier[format] ( identifier[self] . identifier[indent] , identifier[bool] ( identifier[ignore_whitespace] )))
identifier[lines] . identifier[append] ( literal[string] )
keyword[return] identifier[lines]
|
def _ast_op_multiply_to_code(self, opr, ignore_whitespace=False, **kwargs):
"""Convert an AST multiply op to python source code."""
(opl, opr) = opr.operands
if isinstance(opl, Number):
times = opl.value
subject = self._ast_to_code(opr) # depends on [control=['if'], data=[]]
else:
times = opr.value
subject = self._ast_to_code(opl)
lines = ['repeated(']
lines.extend(self._indent(subject))
lines[-1] += ','
lines.append('{0}times={1},'.format(self.indent, times))
lines.append('{0}ignore_whitespace={1}'.format(self.indent, bool(ignore_whitespace)))
lines.append(')')
return lines
|
def duration_to_number(duration, units='seconds'):
"""If duration is already a numeric type, then just return
duration. If duration is a timedelta, return a duration in
seconds.
TODO: allow for multiple types of units.
"""
if isinstance(duration, (int, float, long)):
return duration
elif isinstance(duration, (datetime.timedelta,)):
if units == 'seconds':
return duration.total_seconds()
else:
msg = 'unit "%s" is not supported' % units
raise NotImplementedError(msg)
elif duration == inf or duration == -inf:
msg = "Can't convert infinite duration to number"
raise ValueError(msg)
else:
msg = 'duration is an unknown type (%s)' % duration
raise TypeError(msg)
|
def function[duration_to_number, parameter[duration, units]]:
constant[If duration is already a numeric type, then just return
duration. If duration is a timedelta, return a duration in
seconds.
TODO: allow for multiple types of units.
]
if call[name[isinstance], parameter[name[duration], tuple[[<ast.Name object at 0x7da1b08609d0>, <ast.Name object at 0x7da1b08605e0>, <ast.Name object at 0x7da1b0860f40>]]]] begin[:]
return[name[duration]]
|
keyword[def] identifier[duration_to_number] ( identifier[duration] , identifier[units] = literal[string] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[duration] ,( identifier[int] , identifier[float] , identifier[long] )):
keyword[return] identifier[duration]
keyword[elif] identifier[isinstance] ( identifier[duration] ,( identifier[datetime] . identifier[timedelta] ,)):
keyword[if] identifier[units] == literal[string] :
keyword[return] identifier[duration] . identifier[total_seconds] ()
keyword[else] :
identifier[msg] = literal[string] % identifier[units]
keyword[raise] identifier[NotImplementedError] ( identifier[msg] )
keyword[elif] identifier[duration] == identifier[inf] keyword[or] identifier[duration] ==- identifier[inf] :
identifier[msg] = literal[string]
keyword[raise] identifier[ValueError] ( identifier[msg] )
keyword[else] :
identifier[msg] = literal[string] % identifier[duration]
keyword[raise] identifier[TypeError] ( identifier[msg] )
|
def duration_to_number(duration, units='seconds'):
"""If duration is already a numeric type, then just return
duration. If duration is a timedelta, return a duration in
seconds.
TODO: allow for multiple types of units.
"""
if isinstance(duration, (int, float, long)):
return duration # depends on [control=['if'], data=[]]
elif isinstance(duration, (datetime.timedelta,)):
if units == 'seconds':
return duration.total_seconds() # depends on [control=['if'], data=[]]
else:
msg = 'unit "%s" is not supported' % units
raise NotImplementedError(msg) # depends on [control=['if'], data=[]]
elif duration == inf or duration == -inf:
msg = "Can't convert infinite duration to number"
raise ValueError(msg) # depends on [control=['if'], data=[]]
else:
msg = 'duration is an unknown type (%s)' % duration
raise TypeError(msg)
|
def add_url_rule(self, host, rule_string, endpoint, **options):
"""Add a url rule to the app instance.
The url rule is the same with Flask apps and other Werkzeug apps.
:param host: the matched hostname. e.g. "www.python.org"
:param rule_string: the matched path pattern. e.g. "/news/<int:id>"
:param endpoint: the endpoint name as a dispatching key such as the
qualified name of the object.
"""
rule = Rule(rule_string, host=host, endpoint=endpoint, **options)
self.url_map.add(rule)
|
def function[add_url_rule, parameter[self, host, rule_string, endpoint]]:
constant[Add a url rule to the app instance.
The url rule is the same with Flask apps and other Werkzeug apps.
:param host: the matched hostname. e.g. "www.python.org"
:param rule_string: the matched path pattern. e.g. "/news/<int:id>"
:param endpoint: the endpoint name as a dispatching key such as the
qualified name of the object.
]
variable[rule] assign[=] call[name[Rule], parameter[name[rule_string]]]
call[name[self].url_map.add, parameter[name[rule]]]
|
keyword[def] identifier[add_url_rule] ( identifier[self] , identifier[host] , identifier[rule_string] , identifier[endpoint] ,** identifier[options] ):
literal[string]
identifier[rule] = identifier[Rule] ( identifier[rule_string] , identifier[host] = identifier[host] , identifier[endpoint] = identifier[endpoint] ,** identifier[options] )
identifier[self] . identifier[url_map] . identifier[add] ( identifier[rule] )
|
def add_url_rule(self, host, rule_string, endpoint, **options):
"""Add a url rule to the app instance.
The url rule is the same with Flask apps and other Werkzeug apps.
:param host: the matched hostname. e.g. "www.python.org"
:param rule_string: the matched path pattern. e.g. "/news/<int:id>"
:param endpoint: the endpoint name as a dispatching key such as the
qualified name of the object.
"""
rule = Rule(rule_string, host=host, endpoint=endpoint, **options)
self.url_map.add(rule)
|
def attach_template(self, _template, _key, **unbound_var_values):
"""Attaches the template to this with the _key is supplied with this layer.
Note: names were chosen to avoid conflicts.
Args:
_template: The template to construct.
_key: The key that this layer should replace.
**unbound_var_values: The values for the unbound_vars.
Returns:
A new layer with operation applied.
Raises:
ValueError: If _key is specified twice or there is a problem computing the
template.
"""
if _key in unbound_var_values:
raise ValueError('%s specified twice.' % _key)
unbound_var_values[_key] = self
return _DeferredLayer(self.bookkeeper,
_template.as_layer().construct,
[],
unbound_var_values,
scope=self._scope,
defaults=self._defaults,
partial_context=self._partial_context)
|
def function[attach_template, parameter[self, _template, _key]]:
constant[Attaches the template to this with the _key is supplied with this layer.
Note: names were chosen to avoid conflicts.
Args:
_template: The template to construct.
_key: The key that this layer should replace.
**unbound_var_values: The values for the unbound_vars.
Returns:
A new layer with operation applied.
Raises:
ValueError: If _key is specified twice or there is a problem computing the
template.
]
if compare[name[_key] in name[unbound_var_values]] begin[:]
<ast.Raise object at 0x7da1b23466b0>
call[name[unbound_var_values]][name[_key]] assign[=] name[self]
return[call[name[_DeferredLayer], parameter[name[self].bookkeeper, call[name[_template].as_layer, parameter[]].construct, list[[]], name[unbound_var_values]]]]
|
keyword[def] identifier[attach_template] ( identifier[self] , identifier[_template] , identifier[_key] ,** identifier[unbound_var_values] ):
literal[string]
keyword[if] identifier[_key] keyword[in] identifier[unbound_var_values] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[_key] )
identifier[unbound_var_values] [ identifier[_key] ]= identifier[self]
keyword[return] identifier[_DeferredLayer] ( identifier[self] . identifier[bookkeeper] ,
identifier[_template] . identifier[as_layer] (). identifier[construct] ,
[],
identifier[unbound_var_values] ,
identifier[scope] = identifier[self] . identifier[_scope] ,
identifier[defaults] = identifier[self] . identifier[_defaults] ,
identifier[partial_context] = identifier[self] . identifier[_partial_context] )
|
def attach_template(self, _template, _key, **unbound_var_values):
"""Attaches the template to this with the _key is supplied with this layer.
Note: names were chosen to avoid conflicts.
Args:
_template: The template to construct.
_key: The key that this layer should replace.
**unbound_var_values: The values for the unbound_vars.
Returns:
A new layer with operation applied.
Raises:
ValueError: If _key is specified twice or there is a problem computing the
template.
"""
if _key in unbound_var_values:
raise ValueError('%s specified twice.' % _key) # depends on [control=['if'], data=['_key']]
unbound_var_values[_key] = self
return _DeferredLayer(self.bookkeeper, _template.as_layer().construct, [], unbound_var_values, scope=self._scope, defaults=self._defaults, partial_context=self._partial_context)
|
def load(self, filename, offset):
"""Loads NTFS volume information
Args:
filename (str): Path to file/device to read the volume \
information from.
offset (uint): Valid NTFS partition offset from the beginning \
of the file/device.
Raises:
IOError: If source file/device does not exist or is not readable
"""
self.offset = offset
self.filename = filename
self.bootsector = BootSector(
filename=filename,
length=NTFS_BOOTSECTOR_SIZE,
offset=self.offset)
self.mft_table = MftTable(
mft_entry_size=self.bootsector.mft_record_size,
filename=self.filename,
offset=self.mft_table_offset
)
self.mft_table.preload_entries(NUM_SYSTEM_ENTRIES)
self._load_volume_information()
|
def function[load, parameter[self, filename, offset]]:
constant[Loads NTFS volume information
Args:
filename (str): Path to file/device to read the volume information from.
offset (uint): Valid NTFS partition offset from the beginning of the file/device.
Raises:
IOError: If source file/device does not exist or is not readable
]
name[self].offset assign[=] name[offset]
name[self].filename assign[=] name[filename]
name[self].bootsector assign[=] call[name[BootSector], parameter[]]
name[self].mft_table assign[=] call[name[MftTable], parameter[]]
call[name[self].mft_table.preload_entries, parameter[name[NUM_SYSTEM_ENTRIES]]]
call[name[self]._load_volume_information, parameter[]]
|
keyword[def] identifier[load] ( identifier[self] , identifier[filename] , identifier[offset] ):
literal[string]
identifier[self] . identifier[offset] = identifier[offset]
identifier[self] . identifier[filename] = identifier[filename]
identifier[self] . identifier[bootsector] = identifier[BootSector] (
identifier[filename] = identifier[filename] ,
identifier[length] = identifier[NTFS_BOOTSECTOR_SIZE] ,
identifier[offset] = identifier[self] . identifier[offset] )
identifier[self] . identifier[mft_table] = identifier[MftTable] (
identifier[mft_entry_size] = identifier[self] . identifier[bootsector] . identifier[mft_record_size] ,
identifier[filename] = identifier[self] . identifier[filename] ,
identifier[offset] = identifier[self] . identifier[mft_table_offset]
)
identifier[self] . identifier[mft_table] . identifier[preload_entries] ( identifier[NUM_SYSTEM_ENTRIES] )
identifier[self] . identifier[_load_volume_information] ()
|
def load(self, filename, offset):
"""Loads NTFS volume information
Args:
filename (str): Path to file/device to read the volume information from.
offset (uint): Valid NTFS partition offset from the beginning of the file/device.
Raises:
IOError: If source file/device does not exist or is not readable
"""
self.offset = offset
self.filename = filename
self.bootsector = BootSector(filename=filename, length=NTFS_BOOTSECTOR_SIZE, offset=self.offset)
self.mft_table = MftTable(mft_entry_size=self.bootsector.mft_record_size, filename=self.filename, offset=self.mft_table_offset)
self.mft_table.preload_entries(NUM_SYSTEM_ENTRIES)
self._load_volume_information()
|
def dehydrate(self, iterator):
"""
Pass in an iterator of tweets' JSON and get back an iterator of the
IDs of each tweet.
"""
for line in iterator:
try:
yield json.loads(line)['id_str']
except Exception as e:
log.error("uhoh: %s\n" % e)
|
def function[dehydrate, parameter[self, iterator]]:
constant[
Pass in an iterator of tweets' JSON and get back an iterator of the
IDs of each tweet.
]
for taget[name[line]] in starred[name[iterator]] begin[:]
<ast.Try object at 0x7da1b17d7d60>
|
keyword[def] identifier[dehydrate] ( identifier[self] , identifier[iterator] ):
literal[string]
keyword[for] identifier[line] keyword[in] identifier[iterator] :
keyword[try] :
keyword[yield] identifier[json] . identifier[loads] ( identifier[line] )[ literal[string] ]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[log] . identifier[error] ( literal[string] % identifier[e] )
|
def dehydrate(self, iterator):
"""
Pass in an iterator of tweets' JSON and get back an iterator of the
IDs of each tweet.
"""
for line in iterator:
try:
yield json.loads(line)['id_str'] # depends on [control=['try'], data=[]]
except Exception as e:
log.error('uhoh: %s\n' % e) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['line']]
|
def prepare_modules(module_paths: list, available: dict) -> dict:
"""
Scan all paths for external modules and form key-value dict.
:param module_paths: list of external modules (either python packages or third-party scripts)
:param available: dict of all registered python modules (can contain python modules from module_paths)
:return: dict of external modules, where keys are filenames (same as stepnames) and values are the paths
"""
indexed = {}
for path in module_paths:
if not os.path.exists(path) and path not in available:
err = 'No such path: ' + path
error(err)
else:
for f in os.listdir(path):
mod_path = join(path, f)
if f in indexed:
warning('Override ' + indexed[f] + ' with ' + mod_path)
indexed[f] = mod_path
return indexed
|
def function[prepare_modules, parameter[module_paths, available]]:
constant[
Scan all paths for external modules and form key-value dict.
:param module_paths: list of external modules (either python packages or third-party scripts)
:param available: dict of all registered python modules (can contain python modules from module_paths)
:return: dict of external modules, where keys are filenames (same as stepnames) and values are the paths
]
variable[indexed] assign[=] dictionary[[], []]
for taget[name[path]] in starred[name[module_paths]] begin[:]
if <ast.BoolOp object at 0x7da20e963af0> begin[:]
variable[err] assign[=] binary_operation[constant[No such path: ] + name[path]]
call[name[error], parameter[name[err]]]
return[name[indexed]]
|
keyword[def] identifier[prepare_modules] ( identifier[module_paths] : identifier[list] , identifier[available] : identifier[dict] )-> identifier[dict] :
literal[string]
identifier[indexed] ={}
keyword[for] identifier[path] keyword[in] identifier[module_paths] :
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ) keyword[and] identifier[path] keyword[not] keyword[in] identifier[available] :
identifier[err] = literal[string] + identifier[path]
identifier[error] ( identifier[err] )
keyword[else] :
keyword[for] identifier[f] keyword[in] identifier[os] . identifier[listdir] ( identifier[path] ):
identifier[mod_path] = identifier[join] ( identifier[path] , identifier[f] )
keyword[if] identifier[f] keyword[in] identifier[indexed] :
identifier[warning] ( literal[string] + identifier[indexed] [ identifier[f] ]+ literal[string] + identifier[mod_path] )
identifier[indexed] [ identifier[f] ]= identifier[mod_path]
keyword[return] identifier[indexed]
|
def prepare_modules(module_paths: list, available: dict) -> dict:
"""
Scan all paths for external modules and form key-value dict.
:param module_paths: list of external modules (either python packages or third-party scripts)
:param available: dict of all registered python modules (can contain python modules from module_paths)
:return: dict of external modules, where keys are filenames (same as stepnames) and values are the paths
"""
indexed = {}
for path in module_paths:
if not os.path.exists(path) and path not in available:
err = 'No such path: ' + path
error(err) # depends on [control=['if'], data=[]]
else:
for f in os.listdir(path):
mod_path = join(path, f)
if f in indexed:
warning('Override ' + indexed[f] + ' with ' + mod_path) # depends on [control=['if'], data=['f', 'indexed']]
indexed[f] = mod_path # depends on [control=['for'], data=['f']] # depends on [control=['for'], data=['path']]
return indexed
|
def get_channelstate_settled(
chain_state: ChainState,
payment_network_id: PaymentNetworkID,
token_address: TokenAddress,
) -> List[NettingChannelState]:
"""Return the state of settled channels in a token network."""
return get_channelstate_filter(
chain_state,
payment_network_id,
token_address,
lambda channel_state: channel.get_status(channel_state) == CHANNEL_STATE_SETTLED,
)
|
def function[get_channelstate_settled, parameter[chain_state, payment_network_id, token_address]]:
constant[Return the state of settled channels in a token network.]
return[call[name[get_channelstate_filter], parameter[name[chain_state], name[payment_network_id], name[token_address], <ast.Lambda object at 0x7da1b170d990>]]]
|
keyword[def] identifier[get_channelstate_settled] (
identifier[chain_state] : identifier[ChainState] ,
identifier[payment_network_id] : identifier[PaymentNetworkID] ,
identifier[token_address] : identifier[TokenAddress] ,
)-> identifier[List] [ identifier[NettingChannelState] ]:
literal[string]
keyword[return] identifier[get_channelstate_filter] (
identifier[chain_state] ,
identifier[payment_network_id] ,
identifier[token_address] ,
keyword[lambda] identifier[channel_state] : identifier[channel] . identifier[get_status] ( identifier[channel_state] )== identifier[CHANNEL_STATE_SETTLED] ,
)
|
def get_channelstate_settled(chain_state: ChainState, payment_network_id: PaymentNetworkID, token_address: TokenAddress) -> List[NettingChannelState]:
"""Return the state of settled channels in a token network."""
return get_channelstate_filter(chain_state, payment_network_id, token_address, lambda channel_state: channel.get_status(channel_state) == CHANNEL_STATE_SETTLED)
|
def _validate_channel_definition(self, jp2h, colr):
"""Validate the channel definition box."""
cdef_lst = [j for (j, box) in enumerate(jp2h.box)
if box.box_id == 'cdef']
if len(cdef_lst) > 1:
msg = ("Only one channel definition box is allowed in the "
"JP2 header.")
raise IOError(msg)
elif len(cdef_lst) == 1:
cdef = jp2h.box[cdef_lst[0]]
if colr.colorspace == core.SRGB:
if any([chan + 1 not in cdef.association or
cdef.channel_type[chan] != 0 for chan in [0, 1, 2]]):
msg = ("All color channels must be defined in the "
"channel definition box.")
raise IOError(msg)
elif colr.colorspace == core.GREYSCALE:
if 0 not in cdef.channel_type:
msg = ("All color channels must be defined in the "
"channel definition box.")
raise IOError(msg)
|
def function[_validate_channel_definition, parameter[self, jp2h, colr]]:
constant[Validate the channel definition box.]
variable[cdef_lst] assign[=] <ast.ListComp object at 0x7da1b26aed70>
if compare[call[name[len], parameter[name[cdef_lst]]] greater[>] constant[1]] begin[:]
variable[msg] assign[=] constant[Only one channel definition box is allowed in the JP2 header.]
<ast.Raise object at 0x7da204961c90>
|
keyword[def] identifier[_validate_channel_definition] ( identifier[self] , identifier[jp2h] , identifier[colr] ):
literal[string]
identifier[cdef_lst] =[ identifier[j] keyword[for] ( identifier[j] , identifier[box] ) keyword[in] identifier[enumerate] ( identifier[jp2h] . identifier[box] )
keyword[if] identifier[box] . identifier[box_id] == literal[string] ]
keyword[if] identifier[len] ( identifier[cdef_lst] )> literal[int] :
identifier[msg] =( literal[string]
literal[string] )
keyword[raise] identifier[IOError] ( identifier[msg] )
keyword[elif] identifier[len] ( identifier[cdef_lst] )== literal[int] :
identifier[cdef] = identifier[jp2h] . identifier[box] [ identifier[cdef_lst] [ literal[int] ]]
keyword[if] identifier[colr] . identifier[colorspace] == identifier[core] . identifier[SRGB] :
keyword[if] identifier[any] ([ identifier[chan] + literal[int] keyword[not] keyword[in] identifier[cdef] . identifier[association] keyword[or]
identifier[cdef] . identifier[channel_type] [ identifier[chan] ]!= literal[int] keyword[for] identifier[chan] keyword[in] [ literal[int] , literal[int] , literal[int] ]]):
identifier[msg] =( literal[string]
literal[string] )
keyword[raise] identifier[IOError] ( identifier[msg] )
keyword[elif] identifier[colr] . identifier[colorspace] == identifier[core] . identifier[GREYSCALE] :
keyword[if] literal[int] keyword[not] keyword[in] identifier[cdef] . identifier[channel_type] :
identifier[msg] =( literal[string]
literal[string] )
keyword[raise] identifier[IOError] ( identifier[msg] )
|
def _validate_channel_definition(self, jp2h, colr):
"""Validate the channel definition box."""
cdef_lst = [j for (j, box) in enumerate(jp2h.box) if box.box_id == 'cdef']
if len(cdef_lst) > 1:
msg = 'Only one channel definition box is allowed in the JP2 header.'
raise IOError(msg) # depends on [control=['if'], data=[]]
elif len(cdef_lst) == 1:
cdef = jp2h.box[cdef_lst[0]]
if colr.colorspace == core.SRGB:
if any([chan + 1 not in cdef.association or cdef.channel_type[chan] != 0 for chan in [0, 1, 2]]):
msg = 'All color channels must be defined in the channel definition box.'
raise IOError(msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif colr.colorspace == core.GREYSCALE:
if 0 not in cdef.channel_type:
msg = 'All color channels must be defined in the channel definition box.'
raise IOError(msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
|
def wait_for_link_text(self, link_text, timeout=settings.LARGE_TIMEOUT):
""" The shorter version of wait_for_link_text_visible() """
if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT:
timeout = self.__get_new_timeout(timeout)
return self.wait_for_link_text_visible(link_text, timeout=timeout)
|
def function[wait_for_link_text, parameter[self, link_text, timeout]]:
constant[ The shorter version of wait_for_link_text_visible() ]
if <ast.BoolOp object at 0x7da1b1beec20> begin[:]
variable[timeout] assign[=] call[name[self].__get_new_timeout, parameter[name[timeout]]]
return[call[name[self].wait_for_link_text_visible, parameter[name[link_text]]]]
|
keyword[def] identifier[wait_for_link_text] ( identifier[self] , identifier[link_text] , identifier[timeout] = identifier[settings] . identifier[LARGE_TIMEOUT] ):
literal[string]
keyword[if] identifier[self] . identifier[timeout_multiplier] keyword[and] identifier[timeout] == identifier[settings] . identifier[LARGE_TIMEOUT] :
identifier[timeout] = identifier[self] . identifier[__get_new_timeout] ( identifier[timeout] )
keyword[return] identifier[self] . identifier[wait_for_link_text_visible] ( identifier[link_text] , identifier[timeout] = identifier[timeout] )
|
def wait_for_link_text(self, link_text, timeout=settings.LARGE_TIMEOUT):
""" The shorter version of wait_for_link_text_visible() """
if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT:
timeout = self.__get_new_timeout(timeout) # depends on [control=['if'], data=[]]
return self.wait_for_link_text_visible(link_text, timeout=timeout)
|
def status(ctx):
"""Print a status of this Lambda function"""
status = ctx.status()
click.echo(click.style('Policy', bold=True))
if status['policy']:
line = ' {} ({})'.format(
status['policy']['PolicyName'],
status['policy']['Arn'])
click.echo(click.style(line, fg='green'))
click.echo(click.style('Role', bold=True))
if status['role']:
line = ' {} ({})'.format(
status['role']['RoleName'],
status['role']['Arn'])
click.echo(click.style(line, fg='green'))
click.echo(click.style('Function', bold=True))
if status['function']:
line = ' {} ({})'.format(
status['function']['Configuration']['FunctionName'],
status['function']['Configuration']['FunctionArn'])
click.echo(click.style(line, fg='green'))
else:
click.echo(click.style(' None', fg='green'))
click.echo(click.style('Event Sources', bold=True))
if status['event_sources']:
for event_source in status['event_sources']:
if event_source:
arn = event_source.get('EventSourceArn')
state = event_source.get('State', 'Enabled')
line = ' {}: {}'.format(arn, state)
click.echo(click.style(line, fg='green'))
else:
click.echo(click.style(' None', fg='green'))
|
def function[status, parameter[ctx]]:
constant[Print a status of this Lambda function]
variable[status] assign[=] call[name[ctx].status, parameter[]]
call[name[click].echo, parameter[call[name[click].style, parameter[constant[Policy]]]]]
if call[name[status]][constant[policy]] begin[:]
variable[line] assign[=] call[constant[ {} ({})].format, parameter[call[call[name[status]][constant[policy]]][constant[PolicyName]], call[call[name[status]][constant[policy]]][constant[Arn]]]]
call[name[click].echo, parameter[call[name[click].style, parameter[name[line]]]]]
call[name[click].echo, parameter[call[name[click].style, parameter[constant[Role]]]]]
if call[name[status]][constant[role]] begin[:]
variable[line] assign[=] call[constant[ {} ({})].format, parameter[call[call[name[status]][constant[role]]][constant[RoleName]], call[call[name[status]][constant[role]]][constant[Arn]]]]
call[name[click].echo, parameter[call[name[click].style, parameter[name[line]]]]]
call[name[click].echo, parameter[call[name[click].style, parameter[constant[Function]]]]]
if call[name[status]][constant[function]] begin[:]
variable[line] assign[=] call[constant[ {} ({})].format, parameter[call[call[call[name[status]][constant[function]]][constant[Configuration]]][constant[FunctionName]], call[call[call[name[status]][constant[function]]][constant[Configuration]]][constant[FunctionArn]]]]
call[name[click].echo, parameter[call[name[click].style, parameter[name[line]]]]]
call[name[click].echo, parameter[call[name[click].style, parameter[constant[Event Sources]]]]]
if call[name[status]][constant[event_sources]] begin[:]
for taget[name[event_source]] in starred[call[name[status]][constant[event_sources]]] begin[:]
if name[event_source] begin[:]
variable[arn] assign[=] call[name[event_source].get, parameter[constant[EventSourceArn]]]
variable[state] assign[=] call[name[event_source].get, parameter[constant[State], constant[Enabled]]]
variable[line] assign[=] call[constant[ {}: {}].format, parameter[name[arn], name[state]]]
call[name[click].echo, parameter[call[name[click].style, parameter[name[line]]]]]
|
keyword[def] identifier[status] ( identifier[ctx] ):
literal[string]
identifier[status] = identifier[ctx] . identifier[status] ()
identifier[click] . identifier[echo] ( identifier[click] . identifier[style] ( literal[string] , identifier[bold] = keyword[True] ))
keyword[if] identifier[status] [ literal[string] ]:
identifier[line] = literal[string] . identifier[format] (
identifier[status] [ literal[string] ][ literal[string] ],
identifier[status] [ literal[string] ][ literal[string] ])
identifier[click] . identifier[echo] ( identifier[click] . identifier[style] ( identifier[line] , identifier[fg] = literal[string] ))
identifier[click] . identifier[echo] ( identifier[click] . identifier[style] ( literal[string] , identifier[bold] = keyword[True] ))
keyword[if] identifier[status] [ literal[string] ]:
identifier[line] = literal[string] . identifier[format] (
identifier[status] [ literal[string] ][ literal[string] ],
identifier[status] [ literal[string] ][ literal[string] ])
identifier[click] . identifier[echo] ( identifier[click] . identifier[style] ( identifier[line] , identifier[fg] = literal[string] ))
identifier[click] . identifier[echo] ( identifier[click] . identifier[style] ( literal[string] , identifier[bold] = keyword[True] ))
keyword[if] identifier[status] [ literal[string] ]:
identifier[line] = literal[string] . identifier[format] (
identifier[status] [ literal[string] ][ literal[string] ][ literal[string] ],
identifier[status] [ literal[string] ][ literal[string] ][ literal[string] ])
identifier[click] . identifier[echo] ( identifier[click] . identifier[style] ( identifier[line] , identifier[fg] = literal[string] ))
keyword[else] :
identifier[click] . identifier[echo] ( identifier[click] . identifier[style] ( literal[string] , identifier[fg] = literal[string] ))
identifier[click] . identifier[echo] ( identifier[click] . identifier[style] ( literal[string] , identifier[bold] = keyword[True] ))
keyword[if] identifier[status] [ literal[string] ]:
keyword[for] identifier[event_source] keyword[in] identifier[status] [ literal[string] ]:
keyword[if] identifier[event_source] :
identifier[arn] = identifier[event_source] . identifier[get] ( literal[string] )
identifier[state] = identifier[event_source] . identifier[get] ( literal[string] , literal[string] )
identifier[line] = literal[string] . identifier[format] ( identifier[arn] , identifier[state] )
identifier[click] . identifier[echo] ( identifier[click] . identifier[style] ( identifier[line] , identifier[fg] = literal[string] ))
keyword[else] :
identifier[click] . identifier[echo] ( identifier[click] . identifier[style] ( literal[string] , identifier[fg] = literal[string] ))
|
def status(ctx):
"""Print a status of this Lambda function"""
status = ctx.status()
click.echo(click.style('Policy', bold=True))
if status['policy']:
line = ' {} ({})'.format(status['policy']['PolicyName'], status['policy']['Arn'])
click.echo(click.style(line, fg='green')) # depends on [control=['if'], data=[]]
click.echo(click.style('Role', bold=True))
if status['role']:
line = ' {} ({})'.format(status['role']['RoleName'], status['role']['Arn'])
click.echo(click.style(line, fg='green')) # depends on [control=['if'], data=[]]
click.echo(click.style('Function', bold=True))
if status['function']:
line = ' {} ({})'.format(status['function']['Configuration']['FunctionName'], status['function']['Configuration']['FunctionArn'])
click.echo(click.style(line, fg='green')) # depends on [control=['if'], data=[]]
else:
click.echo(click.style(' None', fg='green'))
click.echo(click.style('Event Sources', bold=True))
if status['event_sources']:
for event_source in status['event_sources']:
if event_source:
arn = event_source.get('EventSourceArn')
state = event_source.get('State', 'Enabled')
line = ' {}: {}'.format(arn, state)
click.echo(click.style(line, fg='green')) # depends on [control=['if'], data=[]]
else:
click.echo(click.style(' None', fg='green')) # depends on [control=['for'], data=['event_source']] # depends on [control=['if'], data=[]]
|
def _set_vcs(self, v, load=False):
"""
Setter method for vcs, mapped from YANG variable /event_handler/event_handler_list/trigger/vcs (enumeration)
If this variable is read-only (config: false) in the
source YANG file, then _set_vcs is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vcs() directly.
YANG Description: VCS event type.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'switch-bootup': {'value': 1}, u'switch-ready-for-configuration': {'value': 2}},), is_leaf=True, yang_name="vcs", rest_name="vcs", parent=self, choice=(u'trigger-choice', u'vcs'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'VCS event type.'}}, namespace='urn:brocade.com:mgmt:brocade-event-handler', defining_module='brocade-event-handler', yang_type='enumeration', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vcs must be of a type compatible with enumeration""",
'defined-type': "brocade-event-handler:enumeration",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'switch-bootup': {'value': 1}, u'switch-ready-for-configuration': {'value': 2}},), is_leaf=True, yang_name="vcs", rest_name="vcs", parent=self, choice=(u'trigger-choice', u'vcs'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'VCS event type.'}}, namespace='urn:brocade.com:mgmt:brocade-event-handler', defining_module='brocade-event-handler', yang_type='enumeration', is_config=True)""",
})
self.__vcs = t
if hasattr(self, '_set'):
self._set()
|
def function[_set_vcs, parameter[self, v, load]]:
constant[
Setter method for vcs, mapped from YANG variable /event_handler/event_handler_list/trigger/vcs (enumeration)
If this variable is read-only (config: false) in the
source YANG file, then _set_vcs is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vcs() directly.
YANG Description: VCS event type.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18f810940>
name[self].__vcs assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]]
|
keyword[def] identifier[_set_vcs] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[unicode] , identifier[restriction_type] = literal[string] , identifier[restriction_arg] ={ literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }},), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[choice] =( literal[string] , literal[string] ), identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__vcs] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] ()
|
def _set_vcs(self, v, load=False):
"""
Setter method for vcs, mapped from YANG variable /event_handler/event_handler_list/trigger/vcs (enumeration)
If this variable is read-only (config: false) in the
source YANG file, then _set_vcs is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vcs() directly.
YANG Description: VCS event type.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_type='dict_key', restriction_arg={u'switch-bootup': {'value': 1}, u'switch-ready-for-configuration': {'value': 2}}), is_leaf=True, yang_name='vcs', rest_name='vcs', parent=self, choice=(u'trigger-choice', u'vcs'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'VCS event type.'}}, namespace='urn:brocade.com:mgmt:brocade-event-handler', defining_module='brocade-event-handler', yang_type='enumeration', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'vcs must be of a type compatible with enumeration', 'defined-type': 'brocade-event-handler:enumeration', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u\'switch-bootup\': {\'value\': 1}, u\'switch-ready-for-configuration\': {\'value\': 2}},), is_leaf=True, yang_name="vcs", rest_name="vcs", parent=self, choice=(u\'trigger-choice\', u\'vcs\'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'VCS event type.\'}}, namespace=\'urn:brocade.com:mgmt:brocade-event-handler\', defining_module=\'brocade-event-handler\', yang_type=\'enumeration\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__vcs = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]]
|
def get_requirement(self, line):
"""Gets requirement name and id."""
res = self.REQ_SEARCH.search(line)
try:
name, tc_id = res.group(1), res.group(2)
except (AttributeError, IndexError):
return None
return LogItem(name, tc_id, None)
|
def function[get_requirement, parameter[self, line]]:
constant[Gets requirement name and id.]
variable[res] assign[=] call[name[self].REQ_SEARCH.search, parameter[name[line]]]
<ast.Try object at 0x7da18eb55a20>
return[call[name[LogItem], parameter[name[name], name[tc_id], constant[None]]]]
|
keyword[def] identifier[get_requirement] ( identifier[self] , identifier[line] ):
literal[string]
identifier[res] = identifier[self] . identifier[REQ_SEARCH] . identifier[search] ( identifier[line] )
keyword[try] :
identifier[name] , identifier[tc_id] = identifier[res] . identifier[group] ( literal[int] ), identifier[res] . identifier[group] ( literal[int] )
keyword[except] ( identifier[AttributeError] , identifier[IndexError] ):
keyword[return] keyword[None]
keyword[return] identifier[LogItem] ( identifier[name] , identifier[tc_id] , keyword[None] )
|
def get_requirement(self, line):
"""Gets requirement name and id."""
res = self.REQ_SEARCH.search(line)
try:
(name, tc_id) = (res.group(1), res.group(2)) # depends on [control=['try'], data=[]]
except (AttributeError, IndexError):
return None # depends on [control=['except'], data=[]]
return LogItem(name, tc_id, None)
|
def await_message(self, *args, **kwargs) -> 'asyncio.Future[Message]':
"""
Block until a message matches. See `on_message`
"""
fut = asyncio.Future()
@self.on_message(*args, **kwargs)
async def handler(message):
fut.set_result(message)
# remove handler when done or cancelled
fut.add_done_callback(lambda _: self.remove_message_handler(handler))
return fut
|
def function[await_message, parameter[self]]:
constant[
Block until a message matches. See `on_message`
]
variable[fut] assign[=] call[name[asyncio].Future, parameter[]]
<ast.AsyncFunctionDef object at 0x7da2054a5a80>
call[name[fut].add_done_callback, parameter[<ast.Lambda object at 0x7da2054a7e20>]]
return[name[fut]]
|
keyword[def] identifier[await_message] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] )-> literal[string] :
literal[string]
identifier[fut] = identifier[asyncio] . identifier[Future] ()
@ identifier[self] . identifier[on_message] (* identifier[args] ,** identifier[kwargs] )
keyword[async] keyword[def] identifier[handler] ( identifier[message] ):
identifier[fut] . identifier[set_result] ( identifier[message] )
identifier[fut] . identifier[add_done_callback] ( keyword[lambda] identifier[_] : identifier[self] . identifier[remove_message_handler] ( identifier[handler] ))
keyword[return] identifier[fut]
|
def await_message(self, *args, **kwargs) -> 'asyncio.Future[Message]':
"""
Block until a message matches. See `on_message`
"""
fut = asyncio.Future()
@self.on_message(*args, **kwargs)
async def handler(message):
fut.set_result(message)
# remove handler when done or cancelled
fut.add_done_callback(lambda _: self.remove_message_handler(handler))
return fut
|
def getmergerequest(self, project_id, mergerequest_id):
"""
Get information about a specific merge request.
:param project_id: ID of the project
:param mergerequest_id: ID of the merge request
:return: dict of the merge request
"""
request = requests.get(
'{0}/{1}/merge_request/{2}'.format(self.projects_url, project_id, mergerequest_id),
headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 200:
return request.json()
else:
return False
|
def function[getmergerequest, parameter[self, project_id, mergerequest_id]]:
constant[
Get information about a specific merge request.
:param project_id: ID of the project
:param mergerequest_id: ID of the merge request
:return: dict of the merge request
]
variable[request] assign[=] call[name[requests].get, parameter[call[constant[{0}/{1}/merge_request/{2}].format, parameter[name[self].projects_url, name[project_id], name[mergerequest_id]]]]]
if compare[name[request].status_code equal[==] constant[200]] begin[:]
return[call[name[request].json, parameter[]]]
|
keyword[def] identifier[getmergerequest] ( identifier[self] , identifier[project_id] , identifier[mergerequest_id] ):
literal[string]
identifier[request] = identifier[requests] . identifier[get] (
literal[string] . identifier[format] ( identifier[self] . identifier[projects_url] , identifier[project_id] , identifier[mergerequest_id] ),
identifier[headers] = identifier[self] . identifier[headers] , identifier[verify] = identifier[self] . identifier[verify_ssl] , identifier[auth] = identifier[self] . identifier[auth] , identifier[timeout] = identifier[self] . identifier[timeout] )
keyword[if] identifier[request] . identifier[status_code] == literal[int] :
keyword[return] identifier[request] . identifier[json] ()
keyword[else] :
keyword[return] keyword[False]
|
def getmergerequest(self, project_id, mergerequest_id):
"""
Get information about a specific merge request.
:param project_id: ID of the project
:param mergerequest_id: ID of the merge request
:return: dict of the merge request
"""
request = requests.get('{0}/{1}/merge_request/{2}'.format(self.projects_url, project_id, mergerequest_id), headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 200:
return request.json() # depends on [control=['if'], data=[]]
else:
return False
|
def remove_stderr_file(self):
"""Remove stderr_file associated with the client."""
try:
# Defer closing the stderr_handle until the client
# is closed because jupyter_client needs it open
# while it tries to restart the kernel
self.stderr_handle.close()
os.remove(self.stderr_file)
except Exception:
pass
|
def function[remove_stderr_file, parameter[self]]:
constant[Remove stderr_file associated with the client.]
<ast.Try object at 0x7da18c4ce890>
|
keyword[def] identifier[remove_stderr_file] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[self] . identifier[stderr_handle] . identifier[close] ()
identifier[os] . identifier[remove] ( identifier[self] . identifier[stderr_file] )
keyword[except] identifier[Exception] :
keyword[pass]
|
def remove_stderr_file(self):
"""Remove stderr_file associated with the client."""
try: # Defer closing the stderr_handle until the client
# is closed because jupyter_client needs it open
# while it tries to restart the kernel
self.stderr_handle.close()
os.remove(self.stderr_file) # depends on [control=['try'], data=[]]
except Exception:
pass # depends on [control=['except'], data=[]]
|
def _add_step(self, step):
"""Add a step to the workflow.
Args:
step (Step): a step from the steps library.
"""
self._closed()
self.has_workflow_step = self.has_workflow_step or step.is_workflow
self.wf_steps[step.name_in_workflow] = step
|
def function[_add_step, parameter[self, step]]:
constant[Add a step to the workflow.
Args:
step (Step): a step from the steps library.
]
call[name[self]._closed, parameter[]]
name[self].has_workflow_step assign[=] <ast.BoolOp object at 0x7da18f8139d0>
call[name[self].wf_steps][name[step].name_in_workflow] assign[=] name[step]
|
keyword[def] identifier[_add_step] ( identifier[self] , identifier[step] ):
literal[string]
identifier[self] . identifier[_closed] ()
identifier[self] . identifier[has_workflow_step] = identifier[self] . identifier[has_workflow_step] keyword[or] identifier[step] . identifier[is_workflow]
identifier[self] . identifier[wf_steps] [ identifier[step] . identifier[name_in_workflow] ]= identifier[step]
|
def _add_step(self, step):
"""Add a step to the workflow.
Args:
step (Step): a step from the steps library.
"""
self._closed()
self.has_workflow_step = self.has_workflow_step or step.is_workflow
self.wf_steps[step.name_in_workflow] = step
|
def get_pokemon_by_number(self, number):
"""
Returns an array of Pokemon objects containing all the forms of the
Pokemon specified the Pokedex number.
"""
endpoint = '/pokemon/' + str(number)
return self.make_request(self.BASE_URL + endpoint)
|
def function[get_pokemon_by_number, parameter[self, number]]:
constant[
Returns an array of Pokemon objects containing all the forms of the
Pokemon specified the Pokedex number.
]
variable[endpoint] assign[=] binary_operation[constant[/pokemon/] + call[name[str], parameter[name[number]]]]
return[call[name[self].make_request, parameter[binary_operation[name[self].BASE_URL + name[endpoint]]]]]
|
keyword[def] identifier[get_pokemon_by_number] ( identifier[self] , identifier[number] ):
literal[string]
identifier[endpoint] = literal[string] + identifier[str] ( identifier[number] )
keyword[return] identifier[self] . identifier[make_request] ( identifier[self] . identifier[BASE_URL] + identifier[endpoint] )
|
def get_pokemon_by_number(self, number):
"""
Returns an array of Pokemon objects containing all the forms of the
Pokemon specified the Pokedex number.
"""
endpoint = '/pokemon/' + str(number)
return self.make_request(self.BASE_URL + endpoint)
|
async def _readline(self, reader):
"""
Readline helper
"""
ret = await reader.readline()
if len(ret) == 0 and reader.at_eof():
raise EOFError()
return ret
|
<ast.AsyncFunctionDef object at 0x7da1b1caf700>
|
keyword[async] keyword[def] identifier[_readline] ( identifier[self] , identifier[reader] ):
literal[string]
identifier[ret] = keyword[await] identifier[reader] . identifier[readline] ()
keyword[if] identifier[len] ( identifier[ret] )== literal[int] keyword[and] identifier[reader] . identifier[at_eof] ():
keyword[raise] identifier[EOFError] ()
keyword[return] identifier[ret]
|
async def _readline(self, reader):
"""
Readline helper
"""
ret = await reader.readline()
if len(ret) == 0 and reader.at_eof():
raise EOFError() # depends on [control=['if'], data=[]]
return ret
|
def add_missing_children(models,request,include_children_for,modelgb):
"helper for do_check. mutates request"
for (nombre,pkey),model in models.items():
for modelclass,pkeys in model.refkeys(include_children_for.get(nombre,())).items():
# warning: this is defaulting to all fields of child object. don't give clients a way to restrict that until there's a reason to.
childname=modelgb['row',modelclass].name
for childfield,cftype in modelclass.FIELDS:
if not isinstance(cftype,basestring) and inspect.isclass(cftype) and issubclass(cftype,syncschema.Syncable):
merge_null_missing(request,childname,childfield,pkeys)
elif childfield in modelclass.SENDRAW: merge_null_missing(request,childname,childfield,pkeys)
else: pass # intentional: ignore the field
return request # the in-place updated original
|
def function[add_missing_children, parameter[models, request, include_children_for, modelgb]]:
constant[helper for do_check. mutates request]
for taget[tuple[[<ast.Tuple object at 0x7da2044c1cf0>, <ast.Name object at 0x7da2044c26e0>]]] in starred[call[name[models].items, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da2044c0fd0>, <ast.Name object at 0x7da2044c0070>]]] in starred[call[call[name[model].refkeys, parameter[call[name[include_children_for].get, parameter[name[nombre], tuple[[]]]]]].items, parameter[]]] begin[:]
variable[childname] assign[=] call[name[modelgb]][tuple[[<ast.Constant object at 0x7da2044c04f0>, <ast.Name object at 0x7da2044c3a60>]]].name
for taget[tuple[[<ast.Name object at 0x7da2044c2500>, <ast.Name object at 0x7da2044c3070>]]] in starred[name[modelclass].FIELDS] begin[:]
if <ast.BoolOp object at 0x7da2044c2740> begin[:]
call[name[merge_null_missing], parameter[name[request], name[childname], name[childfield], name[pkeys]]]
return[name[request]]
|
keyword[def] identifier[add_missing_children] ( identifier[models] , identifier[request] , identifier[include_children_for] , identifier[modelgb] ):
literal[string]
keyword[for] ( identifier[nombre] , identifier[pkey] ), identifier[model] keyword[in] identifier[models] . identifier[items] ():
keyword[for] identifier[modelclass] , identifier[pkeys] keyword[in] identifier[model] . identifier[refkeys] ( identifier[include_children_for] . identifier[get] ( identifier[nombre] ,())). identifier[items] ():
identifier[childname] = identifier[modelgb] [ literal[string] , identifier[modelclass] ]. identifier[name]
keyword[for] identifier[childfield] , identifier[cftype] keyword[in] identifier[modelclass] . identifier[FIELDS] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[cftype] , identifier[basestring] ) keyword[and] identifier[inspect] . identifier[isclass] ( identifier[cftype] ) keyword[and] identifier[issubclass] ( identifier[cftype] , identifier[syncschema] . identifier[Syncable] ):
identifier[merge_null_missing] ( identifier[request] , identifier[childname] , identifier[childfield] , identifier[pkeys] )
keyword[elif] identifier[childfield] keyword[in] identifier[modelclass] . identifier[SENDRAW] : identifier[merge_null_missing] ( identifier[request] , identifier[childname] , identifier[childfield] , identifier[pkeys] )
keyword[else] : keyword[pass]
keyword[return] identifier[request]
|
def add_missing_children(models, request, include_children_for, modelgb):
"""helper for do_check. mutates request"""
for ((nombre, pkey), model) in models.items():
for (modelclass, pkeys) in model.refkeys(include_children_for.get(nombre, ())).items():
# warning: this is defaulting to all fields of child object. don't give clients a way to restrict that until there's a reason to.
childname = modelgb['row', modelclass].name
for (childfield, cftype) in modelclass.FIELDS:
if not isinstance(cftype, basestring) and inspect.isclass(cftype) and issubclass(cftype, syncschema.Syncable):
merge_null_missing(request, childname, childfield, pkeys) # depends on [control=['if'], data=[]]
elif childfield in modelclass.SENDRAW:
merge_null_missing(request, childname, childfield, pkeys) # depends on [control=['if'], data=['childfield']]
else:
pass # intentional: ignore the field # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
return request # the in-place updated original
|
def _GetKeysDefaultEmpty(self, top_level, keys, depth=1):
"""Retrieves plist keys, defaulting to empty values.
Args:
top_level (plistlib._InternalDict): top level plist object.
keys (set[str]): names of keys that should be returned.
depth (int): depth within the plist, where 1 is top level.
Returns:
dict[str, str]: values of the requested keys.
"""
keys = set(keys)
match = {}
if depth == 1:
for key in keys:
value = top_level.get(key, None)
if value is not None:
match[key] = value
else:
for _, parsed_key, parsed_value in plist_interface.RecurseKey(
top_level, depth=depth):
if parsed_key in keys:
match[parsed_key] = parsed_value
if set(match.keys()) == keys:
return match
return match
|
def function[_GetKeysDefaultEmpty, parameter[self, top_level, keys, depth]]:
constant[Retrieves plist keys, defaulting to empty values.
Args:
top_level (plistlib._InternalDict): top level plist object.
keys (set[str]): names of keys that should be returned.
depth (int): depth within the plist, where 1 is top level.
Returns:
dict[str, str]: values of the requested keys.
]
variable[keys] assign[=] call[name[set], parameter[name[keys]]]
variable[match] assign[=] dictionary[[], []]
if compare[name[depth] equal[==] constant[1]] begin[:]
for taget[name[key]] in starred[name[keys]] begin[:]
variable[value] assign[=] call[name[top_level].get, parameter[name[key], constant[None]]]
if compare[name[value] is_not constant[None]] begin[:]
call[name[match]][name[key]] assign[=] name[value]
return[name[match]]
|
keyword[def] identifier[_GetKeysDefaultEmpty] ( identifier[self] , identifier[top_level] , identifier[keys] , identifier[depth] = literal[int] ):
literal[string]
identifier[keys] = identifier[set] ( identifier[keys] )
identifier[match] ={}
keyword[if] identifier[depth] == literal[int] :
keyword[for] identifier[key] keyword[in] identifier[keys] :
identifier[value] = identifier[top_level] . identifier[get] ( identifier[key] , keyword[None] )
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
identifier[match] [ identifier[key] ]= identifier[value]
keyword[else] :
keyword[for] identifier[_] , identifier[parsed_key] , identifier[parsed_value] keyword[in] identifier[plist_interface] . identifier[RecurseKey] (
identifier[top_level] , identifier[depth] = identifier[depth] ):
keyword[if] identifier[parsed_key] keyword[in] identifier[keys] :
identifier[match] [ identifier[parsed_key] ]= identifier[parsed_value]
keyword[if] identifier[set] ( identifier[match] . identifier[keys] ())== identifier[keys] :
keyword[return] identifier[match]
keyword[return] identifier[match]
|
def _GetKeysDefaultEmpty(self, top_level, keys, depth=1):
"""Retrieves plist keys, defaulting to empty values.
Args:
top_level (plistlib._InternalDict): top level plist object.
keys (set[str]): names of keys that should be returned.
depth (int): depth within the plist, where 1 is top level.
Returns:
dict[str, str]: values of the requested keys.
"""
keys = set(keys)
match = {}
if depth == 1:
for key in keys:
value = top_level.get(key, None)
if value is not None:
match[key] = value # depends on [control=['if'], data=['value']] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
else:
for (_, parsed_key, parsed_value) in plist_interface.RecurseKey(top_level, depth=depth):
if parsed_key in keys:
match[parsed_key] = parsed_value
if set(match.keys()) == keys:
return match # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['parsed_key', 'keys']] # depends on [control=['for'], data=[]]
return match
|
def pstdev(data):
"""Calculates the population standard deviation."""
#: http://stackoverflow.com/a/27758326
n = len(data)
if n < 2:
raise ValueError('variance requires at least two data points')
ss = _ss(data)
pvar = ss/n # the population variance
return pvar**0.5
|
def function[pstdev, parameter[data]]:
constant[Calculates the population standard deviation.]
variable[n] assign[=] call[name[len], parameter[name[data]]]
if compare[name[n] less[<] constant[2]] begin[:]
<ast.Raise object at 0x7da20c6aaaa0>
variable[ss] assign[=] call[name[_ss], parameter[name[data]]]
variable[pvar] assign[=] binary_operation[name[ss] / name[n]]
return[binary_operation[name[pvar] ** constant[0.5]]]
|
keyword[def] identifier[pstdev] ( identifier[data] ):
literal[string]
identifier[n] = identifier[len] ( identifier[data] )
keyword[if] identifier[n] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[ss] = identifier[_ss] ( identifier[data] )
identifier[pvar] = identifier[ss] / identifier[n]
keyword[return] identifier[pvar] ** literal[int]
|
def pstdev(data):
"""Calculates the population standard deviation."""
#: http://stackoverflow.com/a/27758326
n = len(data)
if n < 2:
raise ValueError('variance requires at least two data points') # depends on [control=['if'], data=[]]
ss = _ss(data)
pvar = ss / n # the population variance
return pvar ** 0.5
|
def remove_term(self, t):
"""Only removes top-level terms. Child terms can be removed at the parent. """
try:
self.terms.remove(t)
except ValueError:
pass
if t.section and t.parent_term_lc == 'root':
t.section = self.add_section(t.section)
t.section.remove_term(t, remove_from_doc=False)
if t.parent:
try:
t.parent.remove_child(t)
except ValueError:
pass
|
def function[remove_term, parameter[self, t]]:
constant[Only removes top-level terms. Child terms can be removed at the parent. ]
<ast.Try object at 0x7da18f00e950>
if <ast.BoolOp object at 0x7da20e7484f0> begin[:]
name[t].section assign[=] call[name[self].add_section, parameter[name[t].section]]
call[name[t].section.remove_term, parameter[name[t]]]
if name[t].parent begin[:]
<ast.Try object at 0x7da20e74a890>
|
keyword[def] identifier[remove_term] ( identifier[self] , identifier[t] ):
literal[string]
keyword[try] :
identifier[self] . identifier[terms] . identifier[remove] ( identifier[t] )
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[if] identifier[t] . identifier[section] keyword[and] identifier[t] . identifier[parent_term_lc] == literal[string] :
identifier[t] . identifier[section] = identifier[self] . identifier[add_section] ( identifier[t] . identifier[section] )
identifier[t] . identifier[section] . identifier[remove_term] ( identifier[t] , identifier[remove_from_doc] = keyword[False] )
keyword[if] identifier[t] . identifier[parent] :
keyword[try] :
identifier[t] . identifier[parent] . identifier[remove_child] ( identifier[t] )
keyword[except] identifier[ValueError] :
keyword[pass]
|
def remove_term(self, t):
"""Only removes top-level terms. Child terms can be removed at the parent. """
try:
self.terms.remove(t) # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]]
if t.section and t.parent_term_lc == 'root':
t.section = self.add_section(t.section)
t.section.remove_term(t, remove_from_doc=False) # depends on [control=['if'], data=[]]
if t.parent:
try:
t.parent.remove_child(t) # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
|
def updateImage(self, imgdata, xaxis=None, yaxis=None):
"""Updates the Widget image directly.
:type imgdata: numpy.ndarray, see :meth:`pyqtgraph:pyqtgraph.ImageItem.setImage`
:param xaxis: x-axis values, length should match dimension 1 of imgdata
:param yaxis: y-axis values, length should match dimension 0 of imgdata
"""
imgdata = imgdata.T
self.img.setImage(imgdata)
if xaxis is not None and yaxis is not None:
xscale = 1.0/(imgdata.shape[0]/xaxis[-1])
yscale = 1.0/(imgdata.shape[1]/yaxis[-1])
self.resetScale()
self.img.scale(xscale, yscale)
self.imgScale = (xscale, yscale)
self.imageArray = np.fliplr(imgdata)
self.updateColormap()
|
def function[updateImage, parameter[self, imgdata, xaxis, yaxis]]:
constant[Updates the Widget image directly.
:type imgdata: numpy.ndarray, see :meth:`pyqtgraph:pyqtgraph.ImageItem.setImage`
:param xaxis: x-axis values, length should match dimension 1 of imgdata
:param yaxis: y-axis values, length should match dimension 0 of imgdata
]
variable[imgdata] assign[=] name[imgdata].T
call[name[self].img.setImage, parameter[name[imgdata]]]
if <ast.BoolOp object at 0x7da20c76c310> begin[:]
variable[xscale] assign[=] binary_operation[constant[1.0] / binary_operation[call[name[imgdata].shape][constant[0]] / call[name[xaxis]][<ast.UnaryOp object at 0x7da20c76f4f0>]]]
variable[yscale] assign[=] binary_operation[constant[1.0] / binary_operation[call[name[imgdata].shape][constant[1]] / call[name[yaxis]][<ast.UnaryOp object at 0x7da20c76e620>]]]
call[name[self].resetScale, parameter[]]
call[name[self].img.scale, parameter[name[xscale], name[yscale]]]
name[self].imgScale assign[=] tuple[[<ast.Name object at 0x7da20c76d150>, <ast.Name object at 0x7da20c76d540>]]
name[self].imageArray assign[=] call[name[np].fliplr, parameter[name[imgdata]]]
call[name[self].updateColormap, parameter[]]
|
keyword[def] identifier[updateImage] ( identifier[self] , identifier[imgdata] , identifier[xaxis] = keyword[None] , identifier[yaxis] = keyword[None] ):
literal[string]
identifier[imgdata] = identifier[imgdata] . identifier[T]
identifier[self] . identifier[img] . identifier[setImage] ( identifier[imgdata] )
keyword[if] identifier[xaxis] keyword[is] keyword[not] keyword[None] keyword[and] identifier[yaxis] keyword[is] keyword[not] keyword[None] :
identifier[xscale] = literal[int] /( identifier[imgdata] . identifier[shape] [ literal[int] ]/ identifier[xaxis] [- literal[int] ])
identifier[yscale] = literal[int] /( identifier[imgdata] . identifier[shape] [ literal[int] ]/ identifier[yaxis] [- literal[int] ])
identifier[self] . identifier[resetScale] ()
identifier[self] . identifier[img] . identifier[scale] ( identifier[xscale] , identifier[yscale] )
identifier[self] . identifier[imgScale] =( identifier[xscale] , identifier[yscale] )
identifier[self] . identifier[imageArray] = identifier[np] . identifier[fliplr] ( identifier[imgdata] )
identifier[self] . identifier[updateColormap] ()
|
def updateImage(self, imgdata, xaxis=None, yaxis=None):
"""Updates the Widget image directly.
:type imgdata: numpy.ndarray, see :meth:`pyqtgraph:pyqtgraph.ImageItem.setImage`
:param xaxis: x-axis values, length should match dimension 1 of imgdata
:param yaxis: y-axis values, length should match dimension 0 of imgdata
"""
imgdata = imgdata.T
self.img.setImage(imgdata)
if xaxis is not None and yaxis is not None:
xscale = 1.0 / (imgdata.shape[0] / xaxis[-1])
yscale = 1.0 / (imgdata.shape[1] / yaxis[-1])
self.resetScale()
self.img.scale(xscale, yscale)
self.imgScale = (xscale, yscale) # depends on [control=['if'], data=[]]
self.imageArray = np.fliplr(imgdata)
self.updateColormap()
|
def _lightness(color, **kwargs):
""" Get lightness value of HSL color.
"""
l = colorsys.rgb_to_hls(*[x / 255.0 for x in color.value[:3]])[1]
return NumberValue((l * 100, '%'))
|
def function[_lightness, parameter[color]]:
constant[ Get lightness value of HSL color.
]
variable[l] assign[=] call[call[name[colorsys].rgb_to_hls, parameter[<ast.Starred object at 0x7da1b27ecd00>]]][constant[1]]
return[call[name[NumberValue], parameter[tuple[[<ast.BinOp object at 0x7da1b27ee350>, <ast.Constant object at 0x7da1b27eedd0>]]]]]
|
keyword[def] identifier[_lightness] ( identifier[color] ,** identifier[kwargs] ):
literal[string]
identifier[l] = identifier[colorsys] . identifier[rgb_to_hls] (*[ identifier[x] / literal[int] keyword[for] identifier[x] keyword[in] identifier[color] . identifier[value] [: literal[int] ]])[ literal[int] ]
keyword[return] identifier[NumberValue] (( identifier[l] * literal[int] , literal[string] ))
|
def _lightness(color, **kwargs):
""" Get lightness value of HSL color.
"""
l = colorsys.rgb_to_hls(*[x / 255.0 for x in color.value[:3]])[1]
return NumberValue((l * 100, '%'))
|
def _prepare_servers(self):
"""
Prepare the variables that are exposed to the servers.
Most attributes in the server config are used directly. However, due
to variations in how cloud providers treat regions and availability
zones, this method allows either the ``availability_zone`` or the
``region_name`` to be used as the target availability zone for a
server. If both are specified, then ``availability_zone`` is used. If
``availability_zone`` is not specified in the server config, then the
``region_name`` value is used as the target availability zone.
"""
stack = {
A.NAME: self[A.NAME],
A.VERSION: self[A.VERSION],
}
for server in self.get(R.SERVERS, []):
# default cloud values
if A.PROVIDER in server:
if A.server.LAUNCH_TIMEOUT not in server:
server[A.server.LAUNCH_TIMEOUT] = DEFAULT_LAUNCH_TIMEOUT_S
if A.server.POST_DELAY not in server:
server[A.server.POST_DELAY] = DEFAULT_LAUNCH_TIMEOUT_S
if A.server.AZ not in server:
server[A.server.AZ] = server[A.server.REGION]
# distribute the config scope attributes
svars = {
A.STACK: stack,
A.SERVER_CLASS: server[A.NAME],
}
for scope in server.get(A.server.SCOPES, []):
# allow scopes to be defined inline
if isinstance(scope, collections.Mapping):
svars.update(scope)
else:
svars[scope] = self[scope]
# make all of the launch-time attributes (e.g. disk_image_id,
# launch_timeout_s, ssh_key_name, etc...) available as facts in
# case you need them in a playbook.
sattrs = server.copy()
sattrs.pop(A.server.SCOPES, None)
svars[A.server.BANG_ATTRS] = sattrs
server[A.server.VARS] = svars
|
def function[_prepare_servers, parameter[self]]:
constant[
Prepare the variables that are exposed to the servers.
Most attributes in the server config are used directly. However, due
to variations in how cloud providers treat regions and availability
zones, this method allows either the ``availability_zone`` or the
``region_name`` to be used as the target availability zone for a
server. If both are specified, then ``availability_zone`` is used. If
``availability_zone`` is not specified in the server config, then the
``region_name`` value is used as the target availability zone.
]
variable[stack] assign[=] dictionary[[<ast.Attribute object at 0x7da1b1436740>, <ast.Attribute object at 0x7da1b1436e00>], [<ast.Subscript object at 0x7da1b14358d0>, <ast.Subscript object at 0x7da1b1436350>]]
for taget[name[server]] in starred[call[name[self].get, parameter[name[R].SERVERS, list[[]]]]] begin[:]
if compare[name[A].PROVIDER in name[server]] begin[:]
if compare[name[A].server.LAUNCH_TIMEOUT <ast.NotIn object at 0x7da2590d7190> name[server]] begin[:]
call[name[server]][name[A].server.LAUNCH_TIMEOUT] assign[=] name[DEFAULT_LAUNCH_TIMEOUT_S]
if compare[name[A].server.POST_DELAY <ast.NotIn object at 0x7da2590d7190> name[server]] begin[:]
call[name[server]][name[A].server.POST_DELAY] assign[=] name[DEFAULT_LAUNCH_TIMEOUT_S]
if compare[name[A].server.AZ <ast.NotIn object at 0x7da2590d7190> name[server]] begin[:]
call[name[server]][name[A].server.AZ] assign[=] call[name[server]][name[A].server.REGION]
variable[svars] assign[=] dictionary[[<ast.Attribute object at 0x7da1b14357b0>, <ast.Attribute object at 0x7da1b1437b20>], [<ast.Name object at 0x7da1b1436770>, <ast.Subscript object at 0x7da1b1437250>]]
for taget[name[scope]] in starred[call[name[server].get, parameter[name[A].server.SCOPES, list[[]]]]] begin[:]
if call[name[isinstance], parameter[name[scope], name[collections].Mapping]] begin[:]
call[name[svars].update, parameter[name[scope]]]
variable[sattrs] assign[=] call[name[server].copy, parameter[]]
call[name[sattrs].pop, parameter[name[A].server.SCOPES, constant[None]]]
call[name[svars]][name[A].server.BANG_ATTRS] assign[=] name[sattrs]
call[name[server]][name[A].server.VARS] assign[=] name[svars]
|
keyword[def] identifier[_prepare_servers] ( identifier[self] ):
literal[string]
identifier[stack] ={
identifier[A] . identifier[NAME] : identifier[self] [ identifier[A] . identifier[NAME] ],
identifier[A] . identifier[VERSION] : identifier[self] [ identifier[A] . identifier[VERSION] ],
}
keyword[for] identifier[server] keyword[in] identifier[self] . identifier[get] ( identifier[R] . identifier[SERVERS] ,[]):
keyword[if] identifier[A] . identifier[PROVIDER] keyword[in] identifier[server] :
keyword[if] identifier[A] . identifier[server] . identifier[LAUNCH_TIMEOUT] keyword[not] keyword[in] identifier[server] :
identifier[server] [ identifier[A] . identifier[server] . identifier[LAUNCH_TIMEOUT] ]= identifier[DEFAULT_LAUNCH_TIMEOUT_S]
keyword[if] identifier[A] . identifier[server] . identifier[POST_DELAY] keyword[not] keyword[in] identifier[server] :
identifier[server] [ identifier[A] . identifier[server] . identifier[POST_DELAY] ]= identifier[DEFAULT_LAUNCH_TIMEOUT_S]
keyword[if] identifier[A] . identifier[server] . identifier[AZ] keyword[not] keyword[in] identifier[server] :
identifier[server] [ identifier[A] . identifier[server] . identifier[AZ] ]= identifier[server] [ identifier[A] . identifier[server] . identifier[REGION] ]
identifier[svars] ={
identifier[A] . identifier[STACK] : identifier[stack] ,
identifier[A] . identifier[SERVER_CLASS] : identifier[server] [ identifier[A] . identifier[NAME] ],
}
keyword[for] identifier[scope] keyword[in] identifier[server] . identifier[get] ( identifier[A] . identifier[server] . identifier[SCOPES] ,[]):
keyword[if] identifier[isinstance] ( identifier[scope] , identifier[collections] . identifier[Mapping] ):
identifier[svars] . identifier[update] ( identifier[scope] )
keyword[else] :
identifier[svars] [ identifier[scope] ]= identifier[self] [ identifier[scope] ]
identifier[sattrs] = identifier[server] . identifier[copy] ()
identifier[sattrs] . identifier[pop] ( identifier[A] . identifier[server] . identifier[SCOPES] , keyword[None] )
identifier[svars] [ identifier[A] . identifier[server] . identifier[BANG_ATTRS] ]= identifier[sattrs]
identifier[server] [ identifier[A] . identifier[server] . identifier[VARS] ]= identifier[svars]
|
def _prepare_servers(self):
"""
Prepare the variables that are exposed to the servers.
Most attributes in the server config are used directly. However, due
to variations in how cloud providers treat regions and availability
zones, this method allows either the ``availability_zone`` or the
``region_name`` to be used as the target availability zone for a
server. If both are specified, then ``availability_zone`` is used. If
``availability_zone`` is not specified in the server config, then the
``region_name`` value is used as the target availability zone.
"""
stack = {A.NAME: self[A.NAME], A.VERSION: self[A.VERSION]}
for server in self.get(R.SERVERS, []):
# default cloud values
if A.PROVIDER in server:
if A.server.LAUNCH_TIMEOUT not in server:
server[A.server.LAUNCH_TIMEOUT] = DEFAULT_LAUNCH_TIMEOUT_S # depends on [control=['if'], data=['server']]
if A.server.POST_DELAY not in server:
server[A.server.POST_DELAY] = DEFAULT_LAUNCH_TIMEOUT_S # depends on [control=['if'], data=['server']]
if A.server.AZ not in server:
server[A.server.AZ] = server[A.server.REGION] # depends on [control=['if'], data=['server']] # depends on [control=['if'], data=['server']]
# distribute the config scope attributes
svars = {A.STACK: stack, A.SERVER_CLASS: server[A.NAME]}
for scope in server.get(A.server.SCOPES, []):
# allow scopes to be defined inline
if isinstance(scope, collections.Mapping):
svars.update(scope) # depends on [control=['if'], data=[]]
else:
svars[scope] = self[scope] # depends on [control=['for'], data=['scope']]
# make all of the launch-time attributes (e.g. disk_image_id,
# launch_timeout_s, ssh_key_name, etc...) available as facts in
# case you need them in a playbook.
sattrs = server.copy()
sattrs.pop(A.server.SCOPES, None)
svars[A.server.BANG_ATTRS] = sattrs
server[A.server.VARS] = svars # depends on [control=['for'], data=['server']]
|
def smooth_magseries_gaussfilt(mags, windowsize, windowfwhm=7):
'''This smooths the magseries with a Gaussian kernel.
Parameters
----------
mags : np.array
The input mags/flux time-series to smooth.
windowsize : int
This is a odd integer containing the smoothing window size.
windowfwhm : int
This is an odd integer containing the FWHM of the applied Gaussian
window function.
Returns
-------
np.array
The smoothed mag/flux time-series array.
'''
convkernel = Gaussian1DKernel(windowfwhm, x_size=windowsize)
smoothed = convolve(mags, convkernel, boundary='extend')
return smoothed
|
def function[smooth_magseries_gaussfilt, parameter[mags, windowsize, windowfwhm]]:
constant[This smooths the magseries with a Gaussian kernel.
Parameters
----------
mags : np.array
The input mags/flux time-series to smooth.
windowsize : int
This is a odd integer containing the smoothing window size.
windowfwhm : int
This is an odd integer containing the FWHM of the applied Gaussian
window function.
Returns
-------
np.array
The smoothed mag/flux time-series array.
]
variable[convkernel] assign[=] call[name[Gaussian1DKernel], parameter[name[windowfwhm]]]
variable[smoothed] assign[=] call[name[convolve], parameter[name[mags], name[convkernel]]]
return[name[smoothed]]
|
keyword[def] identifier[smooth_magseries_gaussfilt] ( identifier[mags] , identifier[windowsize] , identifier[windowfwhm] = literal[int] ):
literal[string]
identifier[convkernel] = identifier[Gaussian1DKernel] ( identifier[windowfwhm] , identifier[x_size] = identifier[windowsize] )
identifier[smoothed] = identifier[convolve] ( identifier[mags] , identifier[convkernel] , identifier[boundary] = literal[string] )
keyword[return] identifier[smoothed]
|
def smooth_magseries_gaussfilt(mags, windowsize, windowfwhm=7):
"""This smooths the magseries with a Gaussian kernel.
Parameters
----------
mags : np.array
The input mags/flux time-series to smooth.
windowsize : int
This is a odd integer containing the smoothing window size.
windowfwhm : int
This is an odd integer containing the FWHM of the applied Gaussian
window function.
Returns
-------
np.array
The smoothed mag/flux time-series array.
"""
convkernel = Gaussian1DKernel(windowfwhm, x_size=windowsize)
smoothed = convolve(mags, convkernel, boundary='extend')
return smoothed
|
def status_codes_chart():
"""Chart for status codes."""
stats = status_codes_stats()
chart_options = {
'chart': {
'type': 'pie'
},
'title': {
'text': ''
},
'subtitle': {
'text': ''
},
'tooltip': {
'formatter': "return this.y + '/' + this.total + ' (' + "
"Highcharts.numberFormat(this.percentage, 1) + '%)';"
},
'legend': {
'enabled': True,
},
'plotOptions': {
'pie': {
'allowPointSelect': True,
'cursor': 'pointer',
'dataLabels': {
'enabled': True,
'format': '<b>{point.name}</b>: {point.y}/{point.total} '
'({point.percentage:.1f}%)'
},
'showInLegend': True
}
},
'series': [{
'name': _('Status Codes'),
'colorByPoint': True,
'data': sorted(
[{'name': '%s %s' % (k, STATUS_CODES[int(k)]['name']), 'y': v}
for k, v in stats.items()],
key=lambda x: x['y'],
reverse=True)
}]
}
return chart_options
|
def function[status_codes_chart, parameter[]]:
constant[Chart for status codes.]
variable[stats] assign[=] call[name[status_codes_stats], parameter[]]
variable[chart_options] assign[=] dictionary[[<ast.Constant object at 0x7da1b25589d0>, <ast.Constant object at 0x7da1b2559f30>, <ast.Constant object at 0x7da1b255bb20>, <ast.Constant object at 0x7da1b255ada0>, <ast.Constant object at 0x7da1b255ad70>, <ast.Constant object at 0x7da1b255aec0>, <ast.Constant object at 0x7da1b2558bb0>], [<ast.Dict object at 0x7da1b2558700>, <ast.Dict object at 0x7da1b2558d00>, <ast.Dict object at 0x7da1b255bca0>, <ast.Dict object at 0x7da1b2558e50>, <ast.Dict object at 0x7da1b2559db0>, <ast.Dict object at 0x7da1b255ae00>, <ast.List object at 0x7da1b2559de0>]]
return[name[chart_options]]
|
keyword[def] identifier[status_codes_chart] ():
literal[string]
identifier[stats] = identifier[status_codes_stats] ()
identifier[chart_options] ={
literal[string] :{
literal[string] : literal[string]
},
literal[string] :{
literal[string] : literal[string]
},
literal[string] :{
literal[string] : literal[string]
},
literal[string] :{
literal[string] : literal[string]
literal[string]
},
literal[string] :{
literal[string] : keyword[True] ,
},
literal[string] :{
literal[string] :{
literal[string] : keyword[True] ,
literal[string] : literal[string] ,
literal[string] :{
literal[string] : keyword[True] ,
literal[string] : literal[string]
literal[string]
},
literal[string] : keyword[True]
}
},
literal[string] :[{
literal[string] : identifier[_] ( literal[string] ),
literal[string] : keyword[True] ,
literal[string] : identifier[sorted] (
[{ literal[string] : literal[string] %( identifier[k] , identifier[STATUS_CODES] [ identifier[int] ( identifier[k] )][ literal[string] ]), literal[string] : identifier[v] }
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[stats] . identifier[items] ()],
identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[string] ],
identifier[reverse] = keyword[True] )
}]
}
keyword[return] identifier[chart_options]
|
def status_codes_chart():
"""Chart for status codes."""
stats = status_codes_stats()
chart_options = {'chart': {'type': 'pie'}, 'title': {'text': ''}, 'subtitle': {'text': ''}, 'tooltip': {'formatter': "return this.y + '/' + this.total + ' (' + Highcharts.numberFormat(this.percentage, 1) + '%)';"}, 'legend': {'enabled': True}, 'plotOptions': {'pie': {'allowPointSelect': True, 'cursor': 'pointer', 'dataLabels': {'enabled': True, 'format': '<b>{point.name}</b>: {point.y}/{point.total} ({point.percentage:.1f}%)'}, 'showInLegend': True}}, 'series': [{'name': _('Status Codes'), 'colorByPoint': True, 'data': sorted([{'name': '%s %s' % (k, STATUS_CODES[int(k)]['name']), 'y': v} for (k, v) in stats.items()], key=lambda x: x['y'], reverse=True)}]}
return chart_options
|
def _get_nsymop(self):
"""Returns total number of symmetry operations."""
if self.centrosymmetric:
return 2 * len(self._rotations) * len(self._subtrans)
else:
return len(self._rotations) * len(self._subtrans)
|
def function[_get_nsymop, parameter[self]]:
constant[Returns total number of symmetry operations.]
if name[self].centrosymmetric begin[:]
return[binary_operation[binary_operation[constant[2] * call[name[len], parameter[name[self]._rotations]]] * call[name[len], parameter[name[self]._subtrans]]]]
|
keyword[def] identifier[_get_nsymop] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[centrosymmetric] :
keyword[return] literal[int] * identifier[len] ( identifier[self] . identifier[_rotations] )* identifier[len] ( identifier[self] . identifier[_subtrans] )
keyword[else] :
keyword[return] identifier[len] ( identifier[self] . identifier[_rotations] )* identifier[len] ( identifier[self] . identifier[_subtrans] )
|
def _get_nsymop(self):
"""Returns total number of symmetry operations."""
if self.centrosymmetric:
return 2 * len(self._rotations) * len(self._subtrans) # depends on [control=['if'], data=[]]
else:
return len(self._rotations) * len(self._subtrans)
|
def set_object_acl(self, obj):
""" Set object ACL on creation if not already present. """
if not obj._acl:
from nefertari_guards import engine as guards_engine
acl = self._factory(self.request).generate_item_acl(obj)
obj._acl = guards_engine.ACLField.stringify_acl(acl)
|
def function[set_object_acl, parameter[self, obj]]:
constant[ Set object ACL on creation if not already present. ]
if <ast.UnaryOp object at 0x7da1b1123850> begin[:]
from relative_module[nefertari_guards] import module[engine]
variable[acl] assign[=] call[call[name[self]._factory, parameter[name[self].request]].generate_item_acl, parameter[name[obj]]]
name[obj]._acl assign[=] call[name[guards_engine].ACLField.stringify_acl, parameter[name[acl]]]
|
keyword[def] identifier[set_object_acl] ( identifier[self] , identifier[obj] ):
literal[string]
keyword[if] keyword[not] identifier[obj] . identifier[_acl] :
keyword[from] identifier[nefertari_guards] keyword[import] identifier[engine] keyword[as] identifier[guards_engine]
identifier[acl] = identifier[self] . identifier[_factory] ( identifier[self] . identifier[request] ). identifier[generate_item_acl] ( identifier[obj] )
identifier[obj] . identifier[_acl] = identifier[guards_engine] . identifier[ACLField] . identifier[stringify_acl] ( identifier[acl] )
|
def set_object_acl(self, obj):
""" Set object ACL on creation if not already present. """
if not obj._acl:
from nefertari_guards import engine as guards_engine
acl = self._factory(self.request).generate_item_acl(obj)
obj._acl = guards_engine.ACLField.stringify_acl(acl) # depends on [control=['if'], data=[]]
|
def update(self, query, doc, *args, **kwargs):
"""BAckwards compatibility with update"""
if isinstance(doc, list):
return [
self.update_one(query, item, *args, **kwargs)
for item in doc
]
else:
return self.update_one(query, doc, *args, **kwargs)
|
def function[update, parameter[self, query, doc]]:
constant[BAckwards compatibility with update]
if call[name[isinstance], parameter[name[doc], name[list]]] begin[:]
return[<ast.ListComp object at 0x7da20e9541f0>]
|
keyword[def] identifier[update] ( identifier[self] , identifier[query] , identifier[doc] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[doc] , identifier[list] ):
keyword[return] [
identifier[self] . identifier[update_one] ( identifier[query] , identifier[item] ,* identifier[args] ,** identifier[kwargs] )
keyword[for] identifier[item] keyword[in] identifier[doc]
]
keyword[else] :
keyword[return] identifier[self] . identifier[update_one] ( identifier[query] , identifier[doc] ,* identifier[args] ,** identifier[kwargs] )
|
def update(self, query, doc, *args, **kwargs):
"""BAckwards compatibility with update"""
if isinstance(doc, list):
return [self.update_one(query, item, *args, **kwargs) for item in doc] # depends on [control=['if'], data=[]]
else:
return self.update_one(query, doc, *args, **kwargs)
|
def enum_value(self):
"""Return the value of an enum constant."""
if not hasattr(self, '_enum_value'):
assert self.kind == CursorKind.ENUM_CONSTANT_DECL
# Figure out the underlying type of the enum to know if it
# is a signed or unsigned quantity.
underlying_type = self.type
if underlying_type.kind == TypeKind.ENUM:
underlying_type = underlying_type.get_declaration().enum_type
if underlying_type.kind in (TypeKind.CHAR_U,
TypeKind.UCHAR,
TypeKind.CHAR16,
TypeKind.CHAR32,
TypeKind.USHORT,
TypeKind.UINT,
TypeKind.ULONG,
TypeKind.ULONGLONG,
TypeKind.UINT128):
self._enum_value = \
conf.lib.clang_getEnumConstantDeclUnsignedValue(self)
else:
self._enum_value = conf.lib.clang_getEnumConstantDeclValue(self)
return self._enum_value
|
def function[enum_value, parameter[self]]:
constant[Return the value of an enum constant.]
if <ast.UnaryOp object at 0x7da18f09ce80> begin[:]
assert[compare[name[self].kind equal[==] name[CursorKind].ENUM_CONSTANT_DECL]]
variable[underlying_type] assign[=] name[self].type
if compare[name[underlying_type].kind equal[==] name[TypeKind].ENUM] begin[:]
variable[underlying_type] assign[=] call[name[underlying_type].get_declaration, parameter[]].enum_type
if compare[name[underlying_type].kind in tuple[[<ast.Attribute object at 0x7da18f09d6f0>, <ast.Attribute object at 0x7da18f09ce50>, <ast.Attribute object at 0x7da1b2345450>, <ast.Attribute object at 0x7da1b2345cf0>, <ast.Attribute object at 0x7da1b2347d90>, <ast.Attribute object at 0x7da1b2345ba0>, <ast.Attribute object at 0x7da1b2347af0>, <ast.Attribute object at 0x7da1b2346500>, <ast.Attribute object at 0x7da1b23471f0>]]] begin[:]
name[self]._enum_value assign[=] call[name[conf].lib.clang_getEnumConstantDeclUnsignedValue, parameter[name[self]]]
return[name[self]._enum_value]
|
keyword[def] identifier[enum_value] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[assert] identifier[self] . identifier[kind] == identifier[CursorKind] . identifier[ENUM_CONSTANT_DECL]
identifier[underlying_type] = identifier[self] . identifier[type]
keyword[if] identifier[underlying_type] . identifier[kind] == identifier[TypeKind] . identifier[ENUM] :
identifier[underlying_type] = identifier[underlying_type] . identifier[get_declaration] (). identifier[enum_type]
keyword[if] identifier[underlying_type] . identifier[kind] keyword[in] ( identifier[TypeKind] . identifier[CHAR_U] ,
identifier[TypeKind] . identifier[UCHAR] ,
identifier[TypeKind] . identifier[CHAR16] ,
identifier[TypeKind] . identifier[CHAR32] ,
identifier[TypeKind] . identifier[USHORT] ,
identifier[TypeKind] . identifier[UINT] ,
identifier[TypeKind] . identifier[ULONG] ,
identifier[TypeKind] . identifier[ULONGLONG] ,
identifier[TypeKind] . identifier[UINT128] ):
identifier[self] . identifier[_enum_value] = identifier[conf] . identifier[lib] . identifier[clang_getEnumConstantDeclUnsignedValue] ( identifier[self] )
keyword[else] :
identifier[self] . identifier[_enum_value] = identifier[conf] . identifier[lib] . identifier[clang_getEnumConstantDeclValue] ( identifier[self] )
keyword[return] identifier[self] . identifier[_enum_value]
|
def enum_value(self):
"""Return the value of an enum constant."""
if not hasattr(self, '_enum_value'):
assert self.kind == CursorKind.ENUM_CONSTANT_DECL
# Figure out the underlying type of the enum to know if it
# is a signed or unsigned quantity.
underlying_type = self.type
if underlying_type.kind == TypeKind.ENUM:
underlying_type = underlying_type.get_declaration().enum_type # depends on [control=['if'], data=[]]
if underlying_type.kind in (TypeKind.CHAR_U, TypeKind.UCHAR, TypeKind.CHAR16, TypeKind.CHAR32, TypeKind.USHORT, TypeKind.UINT, TypeKind.ULONG, TypeKind.ULONGLONG, TypeKind.UINT128):
self._enum_value = conf.lib.clang_getEnumConstantDeclUnsignedValue(self) # depends on [control=['if'], data=[]]
else:
self._enum_value = conf.lib.clang_getEnumConstantDeclValue(self) # depends on [control=['if'], data=[]]
return self._enum_value
|
def execute(self, task):
"""Execute a task.
"""
taskid = str(task)
res = None
try:
# Try to run the task. If we catch an exception, then
# it becomes the result.
self.time_start = time.time()
self.setstatus('executing %s' % taskid)
self.logger.debug("now executing task '%s'" % taskid)
try:
res = task.execute()
except UserTaskException as e:
res = e
except Exception as e:
self.logger.error("Task '%s' raised exception: %s" %
(str(task), str(e)))
res = e
try:
(type, value, tb) = sys.exc_info()
self.logger.debug("Traceback:\n%s" %
"".join(traceback.format_tb(tb)))
# NOTE: to avoid creating a cycle that might cause
# problems for GC--see Python library doc for sys
# module
tb = None
except Exception as e:
self.logger.debug("Traceback information unavailable.")
finally:
self.logger.debug("done executing task '%s'" % str(task))
self.setstatus('cleaning %s' % taskid)
# Wake up waiters on other threads
task.done(res, noraise=True)
self.time_start = 0.0
self.setstatus('idle')
|
def function[execute, parameter[self, task]]:
constant[Execute a task.
]
variable[taskid] assign[=] call[name[str], parameter[name[task]]]
variable[res] assign[=] constant[None]
<ast.Try object at 0x7da18bcc95d0>
|
keyword[def] identifier[execute] ( identifier[self] , identifier[task] ):
literal[string]
identifier[taskid] = identifier[str] ( identifier[task] )
identifier[res] = keyword[None]
keyword[try] :
identifier[self] . identifier[time_start] = identifier[time] . identifier[time] ()
identifier[self] . identifier[setstatus] ( literal[string] % identifier[taskid] )
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] % identifier[taskid] )
keyword[try] :
identifier[res] = identifier[task] . identifier[execute] ()
keyword[except] identifier[UserTaskException] keyword[as] identifier[e] :
identifier[res] = identifier[e]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] %
( identifier[str] ( identifier[task] ), identifier[str] ( identifier[e] )))
identifier[res] = identifier[e]
keyword[try] :
( identifier[type] , identifier[value] , identifier[tb] )= identifier[sys] . identifier[exc_info] ()
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] %
literal[string] . identifier[join] ( identifier[traceback] . identifier[format_tb] ( identifier[tb] )))
identifier[tb] = keyword[None]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
keyword[finally] :
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] % identifier[str] ( identifier[task] ))
identifier[self] . identifier[setstatus] ( literal[string] % identifier[taskid] )
identifier[task] . identifier[done] ( identifier[res] , identifier[noraise] = keyword[True] )
identifier[self] . identifier[time_start] = literal[int]
identifier[self] . identifier[setstatus] ( literal[string] )
|
def execute(self, task):
"""Execute a task.
"""
taskid = str(task)
res = None
try:
# Try to run the task. If we catch an exception, then
# it becomes the result.
self.time_start = time.time()
self.setstatus('executing %s' % taskid)
self.logger.debug("now executing task '%s'" % taskid)
try:
res = task.execute() # depends on [control=['try'], data=[]]
except UserTaskException as e:
res = e # depends on [control=['except'], data=['e']]
except Exception as e:
self.logger.error("Task '%s' raised exception: %s" % (str(task), str(e)))
res = e
try:
(type, value, tb) = sys.exc_info()
self.logger.debug('Traceback:\n%s' % ''.join(traceback.format_tb(tb)))
# NOTE: to avoid creating a cycle that might cause
# problems for GC--see Python library doc for sys
# module
tb = None # depends on [control=['try'], data=[]]
except Exception as e:
self.logger.debug('Traceback information unavailable.') # depends on [control=['except'], data=[]] # depends on [control=['except'], data=['e']] # depends on [control=['try'], data=[]]
finally:
self.logger.debug("done executing task '%s'" % str(task))
self.setstatus('cleaning %s' % taskid)
# Wake up waiters on other threads
task.done(res, noraise=True)
self.time_start = 0.0
self.setstatus('idle')
|
def __check_mem(self):
''' raise exception on RAM exceeded '''
mem_free = psutil.virtual_memory().available / 2**20
self.log.debug("Memory free: %s/%s", mem_free, self.mem_limit)
if mem_free < self.mem_limit:
raise RuntimeError(
"Not enough resources: free memory less "
"than %sMB: %sMB" % (self.mem_limit, mem_free))
|
def function[__check_mem, parameter[self]]:
constant[ raise exception on RAM exceeded ]
variable[mem_free] assign[=] binary_operation[call[name[psutil].virtual_memory, parameter[]].available / binary_operation[constant[2] ** constant[20]]]
call[name[self].log.debug, parameter[constant[Memory free: %s/%s], name[mem_free], name[self].mem_limit]]
if compare[name[mem_free] less[<] name[self].mem_limit] begin[:]
<ast.Raise object at 0x7da1b0316d40>
|
keyword[def] identifier[__check_mem] ( identifier[self] ):
literal[string]
identifier[mem_free] = identifier[psutil] . identifier[virtual_memory] (). identifier[available] / literal[int] ** literal[int]
identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[mem_free] , identifier[self] . identifier[mem_limit] )
keyword[if] identifier[mem_free] < identifier[self] . identifier[mem_limit] :
keyword[raise] identifier[RuntimeError] (
literal[string]
literal[string] %( identifier[self] . identifier[mem_limit] , identifier[mem_free] ))
|
def __check_mem(self):
""" raise exception on RAM exceeded """
mem_free = psutil.virtual_memory().available / 2 ** 20
self.log.debug('Memory free: %s/%s', mem_free, self.mem_limit)
if mem_free < self.mem_limit:
raise RuntimeError('Not enough resources: free memory less than %sMB: %sMB' % (self.mem_limit, mem_free)) # depends on [control=['if'], data=['mem_free']]
|
async def apply_commandline(self, cmdline):
"""
interprets a command line string
i.e., splits it into separate command strings,
instanciates :class:`Commands <alot.commands.Command>`
accordingly and applies then in sequence.
:param cmdline: command line to interpret
:type cmdline: str
"""
# remove initial spaces
cmdline = cmdline.lstrip()
# we pass Commands one by one to `self.apply_command`.
# To properly call them in sequence, even if they trigger asyncronous
# code (return Deferreds), these applications happen in individual
# callback functions which are then used as callback chain to some
# trivial Deferred that immediately calls its first callback. This way,
# one callback may return a Deferred and thus postpone the application
# of the next callback (and thus Command-application)
def apply_this_command(cmdstring):
logging.debug('%s command string: "%s"', self.mode, str(cmdstring))
# translate cmdstring into :class:`Command`
cmd = commandfactory(cmdstring, self.mode)
# store cmdline for use with 'repeat' command
if cmd.repeatable:
self.last_commandline = cmdline
return self.apply_command(cmd)
try:
for c in split_commandline(cmdline):
await apply_this_command(c)
except Exception as e:
self._error_handler(e)
|
<ast.AsyncFunctionDef object at 0x7da1b080b5e0>
|
keyword[async] keyword[def] identifier[apply_commandline] ( identifier[self] , identifier[cmdline] ):
literal[string]
identifier[cmdline] = identifier[cmdline] . identifier[lstrip] ()
keyword[def] identifier[apply_this_command] ( identifier[cmdstring] ):
identifier[logging] . identifier[debug] ( literal[string] , identifier[self] . identifier[mode] , identifier[str] ( identifier[cmdstring] ))
identifier[cmd] = identifier[commandfactory] ( identifier[cmdstring] , identifier[self] . identifier[mode] )
keyword[if] identifier[cmd] . identifier[repeatable] :
identifier[self] . identifier[last_commandline] = identifier[cmdline]
keyword[return] identifier[self] . identifier[apply_command] ( identifier[cmd] )
keyword[try] :
keyword[for] identifier[c] keyword[in] identifier[split_commandline] ( identifier[cmdline] ):
keyword[await] identifier[apply_this_command] ( identifier[c] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[_error_handler] ( identifier[e] )
|
async def apply_commandline(self, cmdline):
"""
interprets a command line string
i.e., splits it into separate command strings,
instanciates :class:`Commands <alot.commands.Command>`
accordingly and applies then in sequence.
:param cmdline: command line to interpret
:type cmdline: str
"""
# remove initial spaces
cmdline = cmdline.lstrip()
# we pass Commands one by one to `self.apply_command`.
# To properly call them in sequence, even if they trigger asyncronous
# code (return Deferreds), these applications happen in individual
# callback functions which are then used as callback chain to some
# trivial Deferred that immediately calls its first callback. This way,
# one callback may return a Deferred and thus postpone the application
# of the next callback (and thus Command-application)
def apply_this_command(cmdstring):
logging.debug('%s command string: "%s"', self.mode, str(cmdstring))
# translate cmdstring into :class:`Command`
cmd = commandfactory(cmdstring, self.mode)
# store cmdline for use with 'repeat' command
if cmd.repeatable:
self.last_commandline = cmdline # depends on [control=['if'], data=[]]
return self.apply_command(cmd)
try:
for c in split_commandline(cmdline):
await apply_this_command(c) # depends on [control=['for'], data=['c']] # depends on [control=['try'], data=[]]
except Exception as e:
self._error_handler(e) # depends on [control=['except'], data=['e']]
|
def accounts(self):
"""
:rtype: twilio.rest.api.v2010.account.AccountList
"""
if self._accounts is None:
self._accounts = AccountList(self)
return self._accounts
|
def function[accounts, parameter[self]]:
constant[
:rtype: twilio.rest.api.v2010.account.AccountList
]
if compare[name[self]._accounts is constant[None]] begin[:]
name[self]._accounts assign[=] call[name[AccountList], parameter[name[self]]]
return[name[self]._accounts]
|
keyword[def] identifier[accounts] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_accounts] keyword[is] keyword[None] :
identifier[self] . identifier[_accounts] = identifier[AccountList] ( identifier[self] )
keyword[return] identifier[self] . identifier[_accounts]
|
def accounts(self):
"""
:rtype: twilio.rest.api.v2010.account.AccountList
"""
if self._accounts is None:
self._accounts = AccountList(self) # depends on [control=['if'], data=[]]
return self._accounts
|
def links(self,**args):
'''
Return Gist URL-Link, Clone-Link and Script-Link to embed
'''
if 'name' in args:
self.gist_name = args['name']
self.gist_id = self.getMyID(self.gist_name)
elif 'id' in args:
self.gist_id = args['id']
else:
raise Exception('Gist Name/ID must be provided')
if self.gist_id:
r = requests.get(
'%s/gists/%s'%(BASE_URL,self.gist_id),
headers=self.gist.header,
)
if (r.status_code == 200):
content = {
'Github-User': r.json()['user']['login'],
'GistID': r.json()['id'],
'Gist-Link': '%s/%s/%s' %(GIST_URL,self.gist.username,r.json()['id']),
'Clone-Link': '%s/%s.git' %(GIST_URL,r.json()['id']),
'Embed-Script': '<script src="%s/%s/%s.js"</script>' %(GIST_URL,self.gist.username,r.json()['id'])
}
return content
raise Exception('No such gist found')
|
def function[links, parameter[self]]:
constant[
Return Gist URL-Link, Clone-Link and Script-Link to embed
]
if compare[constant[name] in name[args]] begin[:]
name[self].gist_name assign[=] call[name[args]][constant[name]]
name[self].gist_id assign[=] call[name[self].getMyID, parameter[name[self].gist_name]]
if name[self].gist_id begin[:]
variable[r] assign[=] call[name[requests].get, parameter[binary_operation[constant[%s/gists/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f00ece0>, <ast.Attribute object at 0x7da18f00ee30>]]]]]
if compare[name[r].status_code equal[==] constant[200]] begin[:]
variable[content] assign[=] dictionary[[<ast.Constant object at 0x7da18f00e620>, <ast.Constant object at 0x7da18f00ef80>, <ast.Constant object at 0x7da18f00d420>, <ast.Constant object at 0x7da18f00ff10>, <ast.Constant object at 0x7da18f00c100>], [<ast.Subscript object at 0x7da18f00f1f0>, <ast.Subscript object at 0x7da18f00d960>, <ast.BinOp object at 0x7da18f00e470>, <ast.BinOp object at 0x7da18f00e110>, <ast.BinOp object at 0x7da18f00cbe0>]]
return[name[content]]
<ast.Raise object at 0x7da204962350>
|
keyword[def] identifier[links] ( identifier[self] ,** identifier[args] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[self] . identifier[gist_name] = identifier[args] [ literal[string] ]
identifier[self] . identifier[gist_id] = identifier[self] . identifier[getMyID] ( identifier[self] . identifier[gist_name] )
keyword[elif] literal[string] keyword[in] identifier[args] :
identifier[self] . identifier[gist_id] = identifier[args] [ literal[string] ]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] identifier[self] . identifier[gist_id] :
identifier[r] = identifier[requests] . identifier[get] (
literal[string] %( identifier[BASE_URL] , identifier[self] . identifier[gist_id] ),
identifier[headers] = identifier[self] . identifier[gist] . identifier[header] ,
)
keyword[if] ( identifier[r] . identifier[status_code] == literal[int] ):
identifier[content] ={
literal[string] : identifier[r] . identifier[json] ()[ literal[string] ][ literal[string] ],
literal[string] : identifier[r] . identifier[json] ()[ literal[string] ],
literal[string] : literal[string] %( identifier[GIST_URL] , identifier[self] . identifier[gist] . identifier[username] , identifier[r] . identifier[json] ()[ literal[string] ]),
literal[string] : literal[string] %( identifier[GIST_URL] , identifier[r] . identifier[json] ()[ literal[string] ]),
literal[string] : literal[string] %( identifier[GIST_URL] , identifier[self] . identifier[gist] . identifier[username] , identifier[r] . identifier[json] ()[ literal[string] ])
}
keyword[return] identifier[content]
keyword[raise] identifier[Exception] ( literal[string] )
|
def links(self, **args):
"""
Return Gist URL-Link, Clone-Link and Script-Link to embed
"""
if 'name' in args:
self.gist_name = args['name']
self.gist_id = self.getMyID(self.gist_name) # depends on [control=['if'], data=['args']]
elif 'id' in args:
self.gist_id = args['id'] # depends on [control=['if'], data=['args']]
else:
raise Exception('Gist Name/ID must be provided')
if self.gist_id:
r = requests.get('%s/gists/%s' % (BASE_URL, self.gist_id), headers=self.gist.header)
if r.status_code == 200:
content = {'Github-User': r.json()['user']['login'], 'GistID': r.json()['id'], 'Gist-Link': '%s/%s/%s' % (GIST_URL, self.gist.username, r.json()['id']), 'Clone-Link': '%s/%s.git' % (GIST_URL, r.json()['id']), 'Embed-Script': '<script src="%s/%s/%s.js"</script>' % (GIST_URL, self.gist.username, r.json()['id'])}
return content # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
raise Exception('No such gist found')
|
def __merge_by_signle_link(self):
"""!
@brief Merges the most similar clusters in line with single link type.
"""
minimum_single_distance = float('Inf');
indexes = None;
for index_cluster1 in range(0, len(self.__clusters)):
for index_cluster2 in range(index_cluster1 + 1, len(self.__clusters)):
candidate_minimum_distance = self.__calculate_nearest_distance(index_cluster1, index_cluster2);
if (candidate_minimum_distance < minimum_single_distance):
minimum_single_distance = candidate_minimum_distance;
indexes = [index_cluster1, index_cluster2];
self.__clusters[indexes[0]] += self.__clusters[indexes[1]];
self.__clusters.pop(indexes[1]);
|
def function[__merge_by_signle_link, parameter[self]]:
constant[!
@brief Merges the most similar clusters in line with single link type.
]
variable[minimum_single_distance] assign[=] call[name[float], parameter[constant[Inf]]]
variable[indexes] assign[=] constant[None]
for taget[name[index_cluster1]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[self].__clusters]]]]] begin[:]
for taget[name[index_cluster2]] in starred[call[name[range], parameter[binary_operation[name[index_cluster1] + constant[1]], call[name[len], parameter[name[self].__clusters]]]]] begin[:]
variable[candidate_minimum_distance] assign[=] call[name[self].__calculate_nearest_distance, parameter[name[index_cluster1], name[index_cluster2]]]
if compare[name[candidate_minimum_distance] less[<] name[minimum_single_distance]] begin[:]
variable[minimum_single_distance] assign[=] name[candidate_minimum_distance]
variable[indexes] assign[=] list[[<ast.Name object at 0x7da1b0190460>, <ast.Name object at 0x7da1b0192bc0>]]
<ast.AugAssign object at 0x7da1b0193070>
call[name[self].__clusters.pop, parameter[call[name[indexes]][constant[1]]]]
|
keyword[def] identifier[__merge_by_signle_link] ( identifier[self] ):
literal[string]
identifier[minimum_single_distance] = identifier[float] ( literal[string] );
identifier[indexes] = keyword[None] ;
keyword[for] identifier[index_cluster1] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[self] . identifier[__clusters] )):
keyword[for] identifier[index_cluster2] keyword[in] identifier[range] ( identifier[index_cluster1] + literal[int] , identifier[len] ( identifier[self] . identifier[__clusters] )):
identifier[candidate_minimum_distance] = identifier[self] . identifier[__calculate_nearest_distance] ( identifier[index_cluster1] , identifier[index_cluster2] );
keyword[if] ( identifier[candidate_minimum_distance] < identifier[minimum_single_distance] ):
identifier[minimum_single_distance] = identifier[candidate_minimum_distance] ;
identifier[indexes] =[ identifier[index_cluster1] , identifier[index_cluster2] ];
identifier[self] . identifier[__clusters] [ identifier[indexes] [ literal[int] ]]+= identifier[self] . identifier[__clusters] [ identifier[indexes] [ literal[int] ]];
identifier[self] . identifier[__clusters] . identifier[pop] ( identifier[indexes] [ literal[int] ]);
|
def __merge_by_signle_link(self):
"""!
@brief Merges the most similar clusters in line with single link type.
"""
minimum_single_distance = float('Inf')
indexes = None
for index_cluster1 in range(0, len(self.__clusters)):
for index_cluster2 in range(index_cluster1 + 1, len(self.__clusters)):
candidate_minimum_distance = self.__calculate_nearest_distance(index_cluster1, index_cluster2)
if candidate_minimum_distance < minimum_single_distance:
minimum_single_distance = candidate_minimum_distance
indexes = [index_cluster1, index_cluster2] # depends on [control=['if'], data=['candidate_minimum_distance', 'minimum_single_distance']] # depends on [control=['for'], data=['index_cluster2']] # depends on [control=['for'], data=['index_cluster1']]
self.__clusters[indexes[0]] += self.__clusters[indexes[1]]
self.__clusters.pop(indexes[1])
|
def mouseMoveEvent( self, event ):
"""
Sets the value for the slider at the event position.
:param event | <QMouseEvent>
"""
self.setValue(self.valueAt(event.pos().x()))
|
def function[mouseMoveEvent, parameter[self, event]]:
constant[
Sets the value for the slider at the event position.
:param event | <QMouseEvent>
]
call[name[self].setValue, parameter[call[name[self].valueAt, parameter[call[call[name[event].pos, parameter[]].x, parameter[]]]]]]
|
keyword[def] identifier[mouseMoveEvent] ( identifier[self] , identifier[event] ):
literal[string]
identifier[self] . identifier[setValue] ( identifier[self] . identifier[valueAt] ( identifier[event] . identifier[pos] (). identifier[x] ()))
|
def mouseMoveEvent(self, event):
"""
Sets the value for the slider at the event position.
:param event | <QMouseEvent>
"""
self.setValue(self.valueAt(event.pos().x()))
|
def _get_columns_relevant_for_diff(columns_to_show):
"""
Extract columns that are relevant for the diff table.
@param columns_to_show: (list) A list of columns that should be shown
@return: (set) Set of columns that are relevant for the diff table. If
none is marked relevant, the column named "status" will be
returned in the set.
"""
cols = set([col.title for col in columns_to_show if col.relevant_for_diff])
if len(cols) == 0:
return set(
[col.title for col in columns_to_show if col.title == "status"])
else:
return cols
|
def function[_get_columns_relevant_for_diff, parameter[columns_to_show]]:
constant[
Extract columns that are relevant for the diff table.
@param columns_to_show: (list) A list of columns that should be shown
@return: (set) Set of columns that are relevant for the diff table. If
none is marked relevant, the column named "status" will be
returned in the set.
]
variable[cols] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da2044c2860>]]
if compare[call[name[len], parameter[name[cols]]] equal[==] constant[0]] begin[:]
return[call[name[set], parameter[<ast.ListComp object at 0x7da2044c3160>]]]
|
keyword[def] identifier[_get_columns_relevant_for_diff] ( identifier[columns_to_show] ):
literal[string]
identifier[cols] = identifier[set] ([ identifier[col] . identifier[title] keyword[for] identifier[col] keyword[in] identifier[columns_to_show] keyword[if] identifier[col] . identifier[relevant_for_diff] ])
keyword[if] identifier[len] ( identifier[cols] )== literal[int] :
keyword[return] identifier[set] (
[ identifier[col] . identifier[title] keyword[for] identifier[col] keyword[in] identifier[columns_to_show] keyword[if] identifier[col] . identifier[title] == literal[string] ])
keyword[else] :
keyword[return] identifier[cols]
|
def _get_columns_relevant_for_diff(columns_to_show):
"""
Extract columns that are relevant for the diff table.
@param columns_to_show: (list) A list of columns that should be shown
@return: (set) Set of columns that are relevant for the diff table. If
none is marked relevant, the column named "status" will be
returned in the set.
"""
cols = set([col.title for col in columns_to_show if col.relevant_for_diff])
if len(cols) == 0:
return set([col.title for col in columns_to_show if col.title == 'status']) # depends on [control=['if'], data=[]]
else:
return cols
|
def regexp(__string: str, __pattern: str, __repl: Union[Callable, str], *,
count: int = 0, flags: int = 0) -> str:
"""Jinja filter for regexp replacements.
See :func:`re.sub` for documentation.
Returns:
Text with substitutions applied
"""
return re.sub(__pattern, __repl, __string, count, flags)
|
def function[regexp, parameter[__string, __pattern, __repl]]:
constant[Jinja filter for regexp replacements.
See :func:`re.sub` for documentation.
Returns:
Text with substitutions applied
]
return[call[name[re].sub, parameter[name[__pattern], name[__repl], name[__string], name[count], name[flags]]]]
|
keyword[def] identifier[regexp] ( identifier[__string] : identifier[str] , identifier[__pattern] : identifier[str] , identifier[__repl] : identifier[Union] [ identifier[Callable] , identifier[str] ],*,
identifier[count] : identifier[int] = literal[int] , identifier[flags] : identifier[int] = literal[int] )-> identifier[str] :
literal[string]
keyword[return] identifier[re] . identifier[sub] ( identifier[__pattern] , identifier[__repl] , identifier[__string] , identifier[count] , identifier[flags] )
|
def regexp(__string: str, __pattern: str, __repl: Union[Callable, str], *, count: int=0, flags: int=0) -> str:
"""Jinja filter for regexp replacements.
See :func:`re.sub` for documentation.
Returns:
Text with substitutions applied
"""
return re.sub(__pattern, __repl, __string, count, flags)
|
def start(self):
"""
Start the node on the cloud using the given instance properties.
This method is non-blocking: as soon as the node id is returned from
the cloud provider, it will return. The `is_alive`:meth: and
`update_ips`:meth: methods should be used to further gather details
about the state of the node.
"""
log.info("Starting node `%s` from image `%s` with flavor %s ...",
self.name, self.image_id, self.flavor)
self.instance_id = self._cloud_provider.start_instance(
self.user_key_name, self.user_key_public, self.user_key_private,
self.security_group,
self.flavor, self.image_id, self.image_userdata,
username=self.image_user,
node_name=("%s-%s" % (self.cluster_name, self.name)),
**self.extra)
log.debug("Node `%s` has instance ID `%s`", self.name, self.instance_id)
|
def function[start, parameter[self]]:
constant[
Start the node on the cloud using the given instance properties.
This method is non-blocking: as soon as the node id is returned from
the cloud provider, it will return. The `is_alive`:meth: and
`update_ips`:meth: methods should be used to further gather details
about the state of the node.
]
call[name[log].info, parameter[constant[Starting node `%s` from image `%s` with flavor %s ...], name[self].name, name[self].image_id, name[self].flavor]]
name[self].instance_id assign[=] call[name[self]._cloud_provider.start_instance, parameter[name[self].user_key_name, name[self].user_key_public, name[self].user_key_private, name[self].security_group, name[self].flavor, name[self].image_id, name[self].image_userdata]]
call[name[log].debug, parameter[constant[Node `%s` has instance ID `%s`], name[self].name, name[self].instance_id]]
|
keyword[def] identifier[start] ( identifier[self] ):
literal[string]
identifier[log] . identifier[info] ( literal[string] ,
identifier[self] . identifier[name] , identifier[self] . identifier[image_id] , identifier[self] . identifier[flavor] )
identifier[self] . identifier[instance_id] = identifier[self] . identifier[_cloud_provider] . identifier[start_instance] (
identifier[self] . identifier[user_key_name] , identifier[self] . identifier[user_key_public] , identifier[self] . identifier[user_key_private] ,
identifier[self] . identifier[security_group] ,
identifier[self] . identifier[flavor] , identifier[self] . identifier[image_id] , identifier[self] . identifier[image_userdata] ,
identifier[username] = identifier[self] . identifier[image_user] ,
identifier[node_name] =( literal[string] %( identifier[self] . identifier[cluster_name] , identifier[self] . identifier[name] )),
** identifier[self] . identifier[extra] )
identifier[log] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] , identifier[self] . identifier[instance_id] )
|
def start(self):
"""
Start the node on the cloud using the given instance properties.
This method is non-blocking: as soon as the node id is returned from
the cloud provider, it will return. The `is_alive`:meth: and
`update_ips`:meth: methods should be used to further gather details
about the state of the node.
"""
log.info('Starting node `%s` from image `%s` with flavor %s ...', self.name, self.image_id, self.flavor)
self.instance_id = self._cloud_provider.start_instance(self.user_key_name, self.user_key_public, self.user_key_private, self.security_group, self.flavor, self.image_id, self.image_userdata, username=self.image_user, node_name='%s-%s' % (self.cluster_name, self.name), **self.extra)
log.debug('Node `%s` has instance ID `%s`', self.name, self.instance_id)
|
def cleanDescriptor(self, dir=os.getcwd()):
"""
Cleans the build artifacts for the Unreal project or plugin in the specified directory
"""
# Verify that an Unreal project or plugin exists in the specified directory
descriptor = self.getDescriptor(dir)
# Because performing a clean will also delete the engine build itself when using
# a source build, we simply delete the `Binaries` and `Intermediate` directories
shutil.rmtree(os.path.join(dir, 'Binaries'), ignore_errors=True)
shutil.rmtree(os.path.join(dir, 'Intermediate'), ignore_errors=True)
# If we are cleaning a project, also clean any plugins
if self.isProject(descriptor):
projectPlugins = glob.glob(os.path.join(dir, 'Plugins', '*'))
for pluginDir in projectPlugins:
self.cleanDescriptor(pluginDir)
|
def function[cleanDescriptor, parameter[self, dir]]:
constant[
Cleans the build artifacts for the Unreal project or plugin in the specified directory
]
variable[descriptor] assign[=] call[name[self].getDescriptor, parameter[name[dir]]]
call[name[shutil].rmtree, parameter[call[name[os].path.join, parameter[name[dir], constant[Binaries]]]]]
call[name[shutil].rmtree, parameter[call[name[os].path.join, parameter[name[dir], constant[Intermediate]]]]]
if call[name[self].isProject, parameter[name[descriptor]]] begin[:]
variable[projectPlugins] assign[=] call[name[glob].glob, parameter[call[name[os].path.join, parameter[name[dir], constant[Plugins], constant[*]]]]]
for taget[name[pluginDir]] in starred[name[projectPlugins]] begin[:]
call[name[self].cleanDescriptor, parameter[name[pluginDir]]]
|
keyword[def] identifier[cleanDescriptor] ( identifier[self] , identifier[dir] = identifier[os] . identifier[getcwd] ()):
literal[string]
identifier[descriptor] = identifier[self] . identifier[getDescriptor] ( identifier[dir] )
identifier[shutil] . identifier[rmtree] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dir] , literal[string] ), identifier[ignore_errors] = keyword[True] )
identifier[shutil] . identifier[rmtree] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dir] , literal[string] ), identifier[ignore_errors] = keyword[True] )
keyword[if] identifier[self] . identifier[isProject] ( identifier[descriptor] ):
identifier[projectPlugins] = identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dir] , literal[string] , literal[string] ))
keyword[for] identifier[pluginDir] keyword[in] identifier[projectPlugins] :
identifier[self] . identifier[cleanDescriptor] ( identifier[pluginDir] )
|
def cleanDescriptor(self, dir=os.getcwd()):
"""
Cleans the build artifacts for the Unreal project or plugin in the specified directory
""" # Verify that an Unreal project or plugin exists in the specified directory
descriptor = self.getDescriptor(dir) # Because performing a clean will also delete the engine build itself when using
# a source build, we simply delete the `Binaries` and `Intermediate` directories
shutil.rmtree(os.path.join(dir, 'Binaries'), ignore_errors=True)
shutil.rmtree(os.path.join(dir, 'Intermediate'), ignore_errors=True) # If we are cleaning a project, also clean any plugins
if self.isProject(descriptor):
projectPlugins = glob.glob(os.path.join(dir, 'Plugins', '*'))
for pluginDir in projectPlugins:
self.cleanDescriptor(pluginDir) # depends on [control=['for'], data=['pluginDir']] # depends on [control=['if'], data=[]]
|
def _handle_request(self, scheme, netloc, path, headers, body=None, method="GET"):
"""
Run the actual request
"""
backend_url = "{}://{}{}".format(scheme, netloc, path)
try:
response = self.http_request.request(backend_url, method=method, body=body, headers=dict(headers))
self._return_response(response)
except Exception as e:
body = "Invalid response from backend: '{}' Server might be busy".format(e.message)
logging.debug(body)
self.send_error(httplib.SERVICE_UNAVAILABLE, body)
|
def function[_handle_request, parameter[self, scheme, netloc, path, headers, body, method]]:
constant[
Run the actual request
]
variable[backend_url] assign[=] call[constant[{}://{}{}].format, parameter[name[scheme], name[netloc], name[path]]]
<ast.Try object at 0x7da1affe5990>
|
keyword[def] identifier[_handle_request] ( identifier[self] , identifier[scheme] , identifier[netloc] , identifier[path] , identifier[headers] , identifier[body] = keyword[None] , identifier[method] = literal[string] ):
literal[string]
identifier[backend_url] = literal[string] . identifier[format] ( identifier[scheme] , identifier[netloc] , identifier[path] )
keyword[try] :
identifier[response] = identifier[self] . identifier[http_request] . identifier[request] ( identifier[backend_url] , identifier[method] = identifier[method] , identifier[body] = identifier[body] , identifier[headers] = identifier[dict] ( identifier[headers] ))
identifier[self] . identifier[_return_response] ( identifier[response] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[body] = literal[string] . identifier[format] ( identifier[e] . identifier[message] )
identifier[logging] . identifier[debug] ( identifier[body] )
identifier[self] . identifier[send_error] ( identifier[httplib] . identifier[SERVICE_UNAVAILABLE] , identifier[body] )
|
def _handle_request(self, scheme, netloc, path, headers, body=None, method='GET'):
"""
Run the actual request
"""
backend_url = '{}://{}{}'.format(scheme, netloc, path)
try:
response = self.http_request.request(backend_url, method=method, body=body, headers=dict(headers))
self._return_response(response) # depends on [control=['try'], data=[]]
except Exception as e:
body = "Invalid response from backend: '{}' Server might be busy".format(e.message)
logging.debug(body)
self.send_error(httplib.SERVICE_UNAVAILABLE, body) # depends on [control=['except'], data=['e']]
|
def _get_go2nt(self, goids):
"""Get go2nt for given goids."""
go2nt_all = self.grprobj.go2nt
return {go:go2nt_all[go] for go in goids}
|
def function[_get_go2nt, parameter[self, goids]]:
constant[Get go2nt for given goids.]
variable[go2nt_all] assign[=] name[self].grprobj.go2nt
return[<ast.DictComp object at 0x7da20c6a98d0>]
|
keyword[def] identifier[_get_go2nt] ( identifier[self] , identifier[goids] ):
literal[string]
identifier[go2nt_all] = identifier[self] . identifier[grprobj] . identifier[go2nt]
keyword[return] { identifier[go] : identifier[go2nt_all] [ identifier[go] ] keyword[for] identifier[go] keyword[in] identifier[goids] }
|
def _get_go2nt(self, goids):
"""Get go2nt for given goids."""
go2nt_all = self.grprobj.go2nt
return {go: go2nt_all[go] for go in goids}
|
def get_and_cache_account(self, addr):
"""Gets and caches an account for an addres, creates blank if not
found.
:param addr:
:return:
"""
if addr in self.cache:
return self.cache[addr]
rlpdata = self.secure_trie.get(addr)
if (
rlpdata == trie.BLANK_NODE and len(addr) == 32
): # support for hashed addresses
rlpdata = self.trie.get(addr)
if rlpdata != trie.BLANK_NODE:
o = rlp.decode(rlpdata, Account, db=self.db, addr=addr)
else:
o = Account.blank_account(self.db, addr, 0)
self.cache[addr] = o
o._mutable = True
o._cached_rlp = None
return o
|
def function[get_and_cache_account, parameter[self, addr]]:
constant[Gets and caches an account for an addres, creates blank if not
found.
:param addr:
:return:
]
if compare[name[addr] in name[self].cache] begin[:]
return[call[name[self].cache][name[addr]]]
variable[rlpdata] assign[=] call[name[self].secure_trie.get, parameter[name[addr]]]
if <ast.BoolOp object at 0x7da1b1df4400> begin[:]
variable[rlpdata] assign[=] call[name[self].trie.get, parameter[name[addr]]]
if compare[name[rlpdata] not_equal[!=] name[trie].BLANK_NODE] begin[:]
variable[o] assign[=] call[name[rlp].decode, parameter[name[rlpdata], name[Account]]]
call[name[self].cache][name[addr]] assign[=] name[o]
name[o]._mutable assign[=] constant[True]
name[o]._cached_rlp assign[=] constant[None]
return[name[o]]
|
keyword[def] identifier[get_and_cache_account] ( identifier[self] , identifier[addr] ):
literal[string]
keyword[if] identifier[addr] keyword[in] identifier[self] . identifier[cache] :
keyword[return] identifier[self] . identifier[cache] [ identifier[addr] ]
identifier[rlpdata] = identifier[self] . identifier[secure_trie] . identifier[get] ( identifier[addr] )
keyword[if] (
identifier[rlpdata] == identifier[trie] . identifier[BLANK_NODE] keyword[and] identifier[len] ( identifier[addr] )== literal[int]
):
identifier[rlpdata] = identifier[self] . identifier[trie] . identifier[get] ( identifier[addr] )
keyword[if] identifier[rlpdata] != identifier[trie] . identifier[BLANK_NODE] :
identifier[o] = identifier[rlp] . identifier[decode] ( identifier[rlpdata] , identifier[Account] , identifier[db] = identifier[self] . identifier[db] , identifier[addr] = identifier[addr] )
keyword[else] :
identifier[o] = identifier[Account] . identifier[blank_account] ( identifier[self] . identifier[db] , identifier[addr] , literal[int] )
identifier[self] . identifier[cache] [ identifier[addr] ]= identifier[o]
identifier[o] . identifier[_mutable] = keyword[True]
identifier[o] . identifier[_cached_rlp] = keyword[None]
keyword[return] identifier[o]
|
def get_and_cache_account(self, addr):
"""Gets and caches an account for an addres, creates blank if not
found.
:param addr:
:return:
"""
if addr in self.cache:
return self.cache[addr] # depends on [control=['if'], data=['addr']]
rlpdata = self.secure_trie.get(addr)
if rlpdata == trie.BLANK_NODE and len(addr) == 32: # support for hashed addresses
rlpdata = self.trie.get(addr) # depends on [control=['if'], data=[]]
if rlpdata != trie.BLANK_NODE:
o = rlp.decode(rlpdata, Account, db=self.db, addr=addr) # depends on [control=['if'], data=['rlpdata']]
else:
o = Account.blank_account(self.db, addr, 0)
self.cache[addr] = o
o._mutable = True
o._cached_rlp = None
return o
|
def remove(self):
"""
Remove the PID file.
"""
if isfile(self.pid_file):
try:
remove(self.pid_file)
except Exception as e:
self.die('Failed to remove PID file: {}'.format(str(e)))
else:
return True
|
def function[remove, parameter[self]]:
constant[
Remove the PID file.
]
if call[name[isfile], parameter[name[self].pid_file]] begin[:]
<ast.Try object at 0x7da18f722dd0>
|
keyword[def] identifier[remove] ( identifier[self] ):
literal[string]
keyword[if] identifier[isfile] ( identifier[self] . identifier[pid_file] ):
keyword[try] :
identifier[remove] ( identifier[self] . identifier[pid_file] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[die] ( literal[string] . identifier[format] ( identifier[str] ( identifier[e] )))
keyword[else] :
keyword[return] keyword[True]
|
def remove(self):
"""
Remove the PID file.
"""
if isfile(self.pid_file):
try:
remove(self.pid_file) # depends on [control=['try'], data=[]]
except Exception as e:
self.die('Failed to remove PID file: {}'.format(str(e))) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
else:
return True
|
def add_filehandler(level, fmt, filename, mode, backup_count, limit, when):
"""Add a file handler to the global logger."""
kwargs = {}
# If the filename is not set, use the default filename
if filename is None:
filename = getattr(sys.modules['__main__'], '__file__', 'log.py')
filename = os.path.basename(filename.replace('.py', '.log'))
filename = os.path.join('/tmp', filename)
if not os.path.exists(os.path.dirname(filename)):
os.mkdir(os.path.dirname(filename))
kwargs['filename'] = filename
# Choose the filehandler based on the passed arguments
if backup_count == 0: # Use FileHandler
cls = logging.FileHandler
kwargs['mode'] = mode
elif when is None: # Use RotatingFileHandler
cls = logging.handlers.RotatingFileHandler
kwargs['maxBytes'] = limit
kwargs['backupCount'] = backup_count
kwargs['mode'] = mode
else: # Use TimedRotatingFileHandler
cls = logging.handlers.TimedRotatingFileHandler
kwargs['when'] = when
kwargs['interval'] = limit
kwargs['backupCount'] = backup_count
return add_handler(cls, level, fmt, False, **kwargs)
|
def function[add_filehandler, parameter[level, fmt, filename, mode, backup_count, limit, when]]:
constant[Add a file handler to the global logger.]
variable[kwargs] assign[=] dictionary[[], []]
if compare[name[filename] is constant[None]] begin[:]
variable[filename] assign[=] call[name[getattr], parameter[call[name[sys].modules][constant[__main__]], constant[__file__], constant[log.py]]]
variable[filename] assign[=] call[name[os].path.basename, parameter[call[name[filename].replace, parameter[constant[.py], constant[.log]]]]]
variable[filename] assign[=] call[name[os].path.join, parameter[constant[/tmp], name[filename]]]
if <ast.UnaryOp object at 0x7da18ede56f0> begin[:]
call[name[os].mkdir, parameter[call[name[os].path.dirname, parameter[name[filename]]]]]
call[name[kwargs]][constant[filename]] assign[=] name[filename]
if compare[name[backup_count] equal[==] constant[0]] begin[:]
variable[cls] assign[=] name[logging].FileHandler
call[name[kwargs]][constant[mode]] assign[=] name[mode]
return[call[name[add_handler], parameter[name[cls], name[level], name[fmt], constant[False]]]]
|
keyword[def] identifier[add_filehandler] ( identifier[level] , identifier[fmt] , identifier[filename] , identifier[mode] , identifier[backup_count] , identifier[limit] , identifier[when] ):
literal[string]
identifier[kwargs] ={}
keyword[if] identifier[filename] keyword[is] keyword[None] :
identifier[filename] = identifier[getattr] ( identifier[sys] . identifier[modules] [ literal[string] ], literal[string] , literal[string] )
identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[filename] . identifier[replace] ( literal[string] , literal[string] ))
identifier[filename] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , identifier[filename] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[filename] )):
identifier[os] . identifier[mkdir] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[filename] ))
identifier[kwargs] [ literal[string] ]= identifier[filename]
keyword[if] identifier[backup_count] == literal[int] :
identifier[cls] = identifier[logging] . identifier[FileHandler]
identifier[kwargs] [ literal[string] ]= identifier[mode]
keyword[elif] identifier[when] keyword[is] keyword[None] :
identifier[cls] = identifier[logging] . identifier[handlers] . identifier[RotatingFileHandler]
identifier[kwargs] [ literal[string] ]= identifier[limit]
identifier[kwargs] [ literal[string] ]= identifier[backup_count]
identifier[kwargs] [ literal[string] ]= identifier[mode]
keyword[else] :
identifier[cls] = identifier[logging] . identifier[handlers] . identifier[TimedRotatingFileHandler]
identifier[kwargs] [ literal[string] ]= identifier[when]
identifier[kwargs] [ literal[string] ]= identifier[limit]
identifier[kwargs] [ literal[string] ]= identifier[backup_count]
keyword[return] identifier[add_handler] ( identifier[cls] , identifier[level] , identifier[fmt] , keyword[False] ,** identifier[kwargs] )
|
def add_filehandler(level, fmt, filename, mode, backup_count, limit, when):
"""Add a file handler to the global logger."""
kwargs = {}
# If the filename is not set, use the default filename
if filename is None:
filename = getattr(sys.modules['__main__'], '__file__', 'log.py')
filename = os.path.basename(filename.replace('.py', '.log'))
filename = os.path.join('/tmp', filename) # depends on [control=['if'], data=['filename']]
if not os.path.exists(os.path.dirname(filename)):
os.mkdir(os.path.dirname(filename)) # depends on [control=['if'], data=[]]
kwargs['filename'] = filename
# Choose the filehandler based on the passed arguments
if backup_count == 0: # Use FileHandler
cls = logging.FileHandler
kwargs['mode'] = mode # depends on [control=['if'], data=[]]
elif when is None: # Use RotatingFileHandler
cls = logging.handlers.RotatingFileHandler
kwargs['maxBytes'] = limit
kwargs['backupCount'] = backup_count
kwargs['mode'] = mode # depends on [control=['if'], data=[]]
else: # Use TimedRotatingFileHandler
cls = logging.handlers.TimedRotatingFileHandler
kwargs['when'] = when
kwargs['interval'] = limit
kwargs['backupCount'] = backup_count
return add_handler(cls, level, fmt, False, **kwargs)
|
def WIFI(frame, no_rtap=False):
"""calls wifi packet discriminator and constructor.
:frame: ctypes.Structure
:no_rtap: Bool
:return: packet object in success
:return: int
-1 on known error
:return: int
-2 on unknown error
"""
pack = None
try:
pack = WiHelper.get_wifi_packet(frame, no_rtap)
except Exception as e:
logging.exception(e)
return pack
|
def function[WIFI, parameter[frame, no_rtap]]:
constant[calls wifi packet discriminator and constructor.
:frame: ctypes.Structure
:no_rtap: Bool
:return: packet object in success
:return: int
-1 on known error
:return: int
-2 on unknown error
]
variable[pack] assign[=] constant[None]
<ast.Try object at 0x7da1b00da830>
return[name[pack]]
|
keyword[def] identifier[WIFI] ( identifier[frame] , identifier[no_rtap] = keyword[False] ):
literal[string]
identifier[pack] = keyword[None]
keyword[try] :
identifier[pack] = identifier[WiHelper] . identifier[get_wifi_packet] ( identifier[frame] , identifier[no_rtap] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logging] . identifier[exception] ( identifier[e] )
keyword[return] identifier[pack]
|
def WIFI(frame, no_rtap=False):
"""calls wifi packet discriminator and constructor.
:frame: ctypes.Structure
:no_rtap: Bool
:return: packet object in success
:return: int
-1 on known error
:return: int
-2 on unknown error
"""
pack = None
try:
pack = WiHelper.get_wifi_packet(frame, no_rtap) # depends on [control=['try'], data=[]]
except Exception as e:
logging.exception(e) # depends on [control=['except'], data=['e']]
return pack
|
def _uses_aiohttp_session(func):
"""This is a decorator that creates an async with statement around a function, and makes sure that a _session argument is always passed.
Only usable on async functions of course.
The _session argument is (supposed to be) an aiohttp.ClientSession instance in all functions that this decorator has been used on.
This is used to make sure that all session objects are properly entered and exited, or that they are passed into a function properly.
This adds an session keyword argument to the method signature, and that session will be used as _session if it is not None."""
# The function the decorator returns
async def decorated_func(*args, session=None, **kwargs):
if session is not None:
# There is a session passed
return await func(*args, _session=session, **kwargs)
else:
# The session argument wasn't passed, so we create our own
async with aiohttp.ClientSession() as new_session:
return await func(*args, _session=new_session, **kwargs)
# We return the decorated func
return decorated_func
|
def function[_uses_aiohttp_session, parameter[func]]:
constant[This is a decorator that creates an async with statement around a function, and makes sure that a _session argument is always passed.
Only usable on async functions of course.
The _session argument is (supposed to be) an aiohttp.ClientSession instance in all functions that this decorator has been used on.
This is used to make sure that all session objects are properly entered and exited, or that they are passed into a function properly.
This adds an session keyword argument to the method signature, and that session will be used as _session if it is not None.]
<ast.AsyncFunctionDef object at 0x7da1b0f0e9b0>
return[name[decorated_func]]
|
keyword[def] identifier[_uses_aiohttp_session] ( identifier[func] ):
literal[string]
keyword[async] keyword[def] identifier[decorated_func] (* identifier[args] , identifier[session] = keyword[None] ,** identifier[kwargs] ):
keyword[if] identifier[session] keyword[is] keyword[not] keyword[None] :
keyword[return] keyword[await] identifier[func] (* identifier[args] , identifier[_session] = identifier[session] ,** identifier[kwargs] )
keyword[else] :
keyword[async] keyword[with] identifier[aiohttp] . identifier[ClientSession] () keyword[as] identifier[new_session] :
keyword[return] keyword[await] identifier[func] (* identifier[args] , identifier[_session] = identifier[new_session] ,** identifier[kwargs] )
keyword[return] identifier[decorated_func]
|
def _uses_aiohttp_session(func):
"""This is a decorator that creates an async with statement around a function, and makes sure that a _session argument is always passed.
Only usable on async functions of course.
The _session argument is (supposed to be) an aiohttp.ClientSession instance in all functions that this decorator has been used on.
This is used to make sure that all session objects are properly entered and exited, or that they are passed into a function properly.
This adds an session keyword argument to the method signature, and that session will be used as _session if it is not None."""
# The function the decorator returns
async def decorated_func(*args, session=None, **kwargs):
if session is not None:
# There is a session passed
return await func(*args, _session=session, **kwargs) # depends on [control=['if'], data=['session']]
else:
# The session argument wasn't passed, so we create our own
async with aiohttp.ClientSession() as new_session:
return await func(*args, _session=new_session, **kwargs)
# We return the decorated func
return decorated_func
|
def filter_convolve(data, filters, filter_rot=False, method='scipy'):
r"""Filter convolve
This method convolves the input image with the wavelet filters
Parameters
----------
data : np.ndarray
Input data, 2D array
filters : np.ndarray
Wavelet filters, 3D array
filter_rot : bool, optional
Option to rotate wavelet filters (default is 'False')
method : str {'astropy', 'scipy'}, optional
Convolution method (default is 'scipy')
Returns
-------
np.ndarray convolved data
Examples
--------
>>> from modopt.signal.wavelet import filter_convolve
>>> x = np.arange(9).reshape(3, 3).astype(float)
>>> y = np.arange(36).reshape(4, 3, 3).astype(float)
>>> filter_convolve(x, y)
array([[[ 174., 165., 174.],
[ 93., 84., 93.],
[ 174., 165., 174.]],
[[ 498., 489., 498.],
[ 417., 408., 417.],
[ 498., 489., 498.]],
[[ 822., 813., 822.],
[ 741., 732., 741.],
[ 822., 813., 822.]],
[[ 1146., 1137., 1146.],
[ 1065., 1056., 1065.],
[ 1146., 1137., 1146.]]])
>>> filter_convolve(y, y, filter_rot=True)
array([[ 14550., 14586., 14550.],
[ 14874., 14910., 14874.],
[ 14550., 14586., 14550.]])
"""
if filter_rot:
return np.sum([convolve(coef, f, method=method) for coef, f in
zip(data, rotate_stack(filters))], axis=0)
else:
return np.array([convolve(data, f, method=method) for f in filters])
|
def function[filter_convolve, parameter[data, filters, filter_rot, method]]:
constant[Filter convolve
This method convolves the input image with the wavelet filters
Parameters
----------
data : np.ndarray
Input data, 2D array
filters : np.ndarray
Wavelet filters, 3D array
filter_rot : bool, optional
Option to rotate wavelet filters (default is 'False')
method : str {'astropy', 'scipy'}, optional
Convolution method (default is 'scipy')
Returns
-------
np.ndarray convolved data
Examples
--------
>>> from modopt.signal.wavelet import filter_convolve
>>> x = np.arange(9).reshape(3, 3).astype(float)
>>> y = np.arange(36).reshape(4, 3, 3).astype(float)
>>> filter_convolve(x, y)
array([[[ 174., 165., 174.],
[ 93., 84., 93.],
[ 174., 165., 174.]],
[[ 498., 489., 498.],
[ 417., 408., 417.],
[ 498., 489., 498.]],
[[ 822., 813., 822.],
[ 741., 732., 741.],
[ 822., 813., 822.]],
[[ 1146., 1137., 1146.],
[ 1065., 1056., 1065.],
[ 1146., 1137., 1146.]]])
>>> filter_convolve(y, y, filter_rot=True)
array([[ 14550., 14586., 14550.],
[ 14874., 14910., 14874.],
[ 14550., 14586., 14550.]])
]
if name[filter_rot] begin[:]
return[call[name[np].sum, parameter[<ast.ListComp object at 0x7da1b0dbd570>]]]
|
keyword[def] identifier[filter_convolve] ( identifier[data] , identifier[filters] , identifier[filter_rot] = keyword[False] , identifier[method] = literal[string] ):
literal[string]
keyword[if] identifier[filter_rot] :
keyword[return] identifier[np] . identifier[sum] ([ identifier[convolve] ( identifier[coef] , identifier[f] , identifier[method] = identifier[method] ) keyword[for] identifier[coef] , identifier[f] keyword[in]
identifier[zip] ( identifier[data] , identifier[rotate_stack] ( identifier[filters] ))], identifier[axis] = literal[int] )
keyword[else] :
keyword[return] identifier[np] . identifier[array] ([ identifier[convolve] ( identifier[data] , identifier[f] , identifier[method] = identifier[method] ) keyword[for] identifier[f] keyword[in] identifier[filters] ])
|
def filter_convolve(data, filters, filter_rot=False, method='scipy'):
"""Filter convolve
This method convolves the input image with the wavelet filters
Parameters
----------
data : np.ndarray
Input data, 2D array
filters : np.ndarray
Wavelet filters, 3D array
filter_rot : bool, optional
Option to rotate wavelet filters (default is 'False')
method : str {'astropy', 'scipy'}, optional
Convolution method (default is 'scipy')
Returns
-------
np.ndarray convolved data
Examples
--------
>>> from modopt.signal.wavelet import filter_convolve
>>> x = np.arange(9).reshape(3, 3).astype(float)
>>> y = np.arange(36).reshape(4, 3, 3).astype(float)
>>> filter_convolve(x, y)
array([[[ 174., 165., 174.],
[ 93., 84., 93.],
[ 174., 165., 174.]],
[[ 498., 489., 498.],
[ 417., 408., 417.],
[ 498., 489., 498.]],
[[ 822., 813., 822.],
[ 741., 732., 741.],
[ 822., 813., 822.]],
[[ 1146., 1137., 1146.],
[ 1065., 1056., 1065.],
[ 1146., 1137., 1146.]]])
>>> filter_convolve(y, y, filter_rot=True)
array([[ 14550., 14586., 14550.],
[ 14874., 14910., 14874.],
[ 14550., 14586., 14550.]])
"""
if filter_rot:
return np.sum([convolve(coef, f, method=method) for (coef, f) in zip(data, rotate_stack(filters))], axis=0) # depends on [control=['if'], data=[]]
else:
return np.array([convolve(data, f, method=method) for f in filters])
|
def hosts_to_endpoints(hosts, port=2181):
"""
return a list of (host, port) tuples from a given host[:port],... str
"""
endpoints = []
for host in hosts.split(","):
endpoints.append(tuple(host.rsplit(":", 1)) if ":" in host else (host, port))
return endpoints
|
def function[hosts_to_endpoints, parameter[hosts, port]]:
constant[
return a list of (host, port) tuples from a given host[:port],... str
]
variable[endpoints] assign[=] list[[]]
for taget[name[host]] in starred[call[name[hosts].split, parameter[constant[,]]]] begin[:]
call[name[endpoints].append, parameter[<ast.IfExp object at 0x7da1b0780a90>]]
return[name[endpoints]]
|
keyword[def] identifier[hosts_to_endpoints] ( identifier[hosts] , identifier[port] = literal[int] ):
literal[string]
identifier[endpoints] =[]
keyword[for] identifier[host] keyword[in] identifier[hosts] . identifier[split] ( literal[string] ):
identifier[endpoints] . identifier[append] ( identifier[tuple] ( identifier[host] . identifier[rsplit] ( literal[string] , literal[int] )) keyword[if] literal[string] keyword[in] identifier[host] keyword[else] ( identifier[host] , identifier[port] ))
keyword[return] identifier[endpoints]
|
def hosts_to_endpoints(hosts, port=2181):
"""
return a list of (host, port) tuples from a given host[:port],... str
"""
endpoints = []
for host in hosts.split(','):
endpoints.append(tuple(host.rsplit(':', 1)) if ':' in host else (host, port)) # depends on [control=['for'], data=['host']]
return endpoints
|
def predict(self, data, output_margin=False, ntree_limit=0, pred_leaf=False):
"""
Predict with data.
NOTE: This function is not thread safe.
For each booster object, predict can only be called from one thread.
If you want to run prediction using multiple thread, call bst.copy() to make copies
of model object and then call predict
Parameters
----------
data : DMatrix
The dmatrix storing the input.
output_margin : bool
Whether to output the raw untransformed margin value.
ntree_limit : int
Limit number of trees in the prediction; defaults to 0 (use all trees).
pred_leaf : bool
When this option is on, the output will be a matrix of (nsample, ntrees)
with each record indicating the predicted leaf index of each sample in each tree.
Note that the leaf index of a tree is unique per tree, so you may find leaf 1
in both tree 1 and tree 0.
Returns
-------
prediction : numpy array
"""
option_mask = 0x00
if output_margin:
option_mask |= 0x01
if pred_leaf:
option_mask |= 0x02
self._validate_features(data)
length = ctypes.c_ulong()
preds = ctypes.POINTER(ctypes.c_float)()
_check_call(_LIB.XGBoosterPredict(self.handle, data.handle,
option_mask, ntree_limit,
ctypes.byref(length),
ctypes.byref(preds)))
preds = ctypes2numpy(preds, length.value, np.float32)
if pred_leaf:
preds = preds.astype(np.int32)
nrow = data.num_row()
if preds.size != nrow and preds.size % nrow == 0:
preds = preds.reshape(nrow, preds.size / nrow)
return preds
|
def function[predict, parameter[self, data, output_margin, ntree_limit, pred_leaf]]:
constant[
Predict with data.
NOTE: This function is not thread safe.
For each booster object, predict can only be called from one thread.
If you want to run prediction using multiple thread, call bst.copy() to make copies
of model object and then call predict
Parameters
----------
data : DMatrix
The dmatrix storing the input.
output_margin : bool
Whether to output the raw untransformed margin value.
ntree_limit : int
Limit number of trees in the prediction; defaults to 0 (use all trees).
pred_leaf : bool
When this option is on, the output will be a matrix of (nsample, ntrees)
with each record indicating the predicted leaf index of each sample in each tree.
Note that the leaf index of a tree is unique per tree, so you may find leaf 1
in both tree 1 and tree 0.
Returns
-------
prediction : numpy array
]
variable[option_mask] assign[=] constant[0]
if name[output_margin] begin[:]
<ast.AugAssign object at 0x7da1b21fc070>
if name[pred_leaf] begin[:]
<ast.AugAssign object at 0x7da1b21fc9a0>
call[name[self]._validate_features, parameter[name[data]]]
variable[length] assign[=] call[name[ctypes].c_ulong, parameter[]]
variable[preds] assign[=] call[call[name[ctypes].POINTER, parameter[name[ctypes].c_float]], parameter[]]
call[name[_check_call], parameter[call[name[_LIB].XGBoosterPredict, parameter[name[self].handle, name[data].handle, name[option_mask], name[ntree_limit], call[name[ctypes].byref, parameter[name[length]]], call[name[ctypes].byref, parameter[name[preds]]]]]]]
variable[preds] assign[=] call[name[ctypes2numpy], parameter[name[preds], name[length].value, name[np].float32]]
if name[pred_leaf] begin[:]
variable[preds] assign[=] call[name[preds].astype, parameter[name[np].int32]]
variable[nrow] assign[=] call[name[data].num_row, parameter[]]
if <ast.BoolOp object at 0x7da1b1f64220> begin[:]
variable[preds] assign[=] call[name[preds].reshape, parameter[name[nrow], binary_operation[name[preds].size / name[nrow]]]]
return[name[preds]]
|
keyword[def] identifier[predict] ( identifier[self] , identifier[data] , identifier[output_margin] = keyword[False] , identifier[ntree_limit] = literal[int] , identifier[pred_leaf] = keyword[False] ):
literal[string]
identifier[option_mask] = literal[int]
keyword[if] identifier[output_margin] :
identifier[option_mask] |= literal[int]
keyword[if] identifier[pred_leaf] :
identifier[option_mask] |= literal[int]
identifier[self] . identifier[_validate_features] ( identifier[data] )
identifier[length] = identifier[ctypes] . identifier[c_ulong] ()
identifier[preds] = identifier[ctypes] . identifier[POINTER] ( identifier[ctypes] . identifier[c_float] )()
identifier[_check_call] ( identifier[_LIB] . identifier[XGBoosterPredict] ( identifier[self] . identifier[handle] , identifier[data] . identifier[handle] ,
identifier[option_mask] , identifier[ntree_limit] ,
identifier[ctypes] . identifier[byref] ( identifier[length] ),
identifier[ctypes] . identifier[byref] ( identifier[preds] )))
identifier[preds] = identifier[ctypes2numpy] ( identifier[preds] , identifier[length] . identifier[value] , identifier[np] . identifier[float32] )
keyword[if] identifier[pred_leaf] :
identifier[preds] = identifier[preds] . identifier[astype] ( identifier[np] . identifier[int32] )
identifier[nrow] = identifier[data] . identifier[num_row] ()
keyword[if] identifier[preds] . identifier[size] != identifier[nrow] keyword[and] identifier[preds] . identifier[size] % identifier[nrow] == literal[int] :
identifier[preds] = identifier[preds] . identifier[reshape] ( identifier[nrow] , identifier[preds] . identifier[size] / identifier[nrow] )
keyword[return] identifier[preds]
|
def predict(self, data, output_margin=False, ntree_limit=0, pred_leaf=False):
"""
Predict with data.
NOTE: This function is not thread safe.
For each booster object, predict can only be called from one thread.
If you want to run prediction using multiple thread, call bst.copy() to make copies
of model object and then call predict
Parameters
----------
data : DMatrix
The dmatrix storing the input.
output_margin : bool
Whether to output the raw untransformed margin value.
ntree_limit : int
Limit number of trees in the prediction; defaults to 0 (use all trees).
pred_leaf : bool
When this option is on, the output will be a matrix of (nsample, ntrees)
with each record indicating the predicted leaf index of each sample in each tree.
Note that the leaf index of a tree is unique per tree, so you may find leaf 1
in both tree 1 and tree 0.
Returns
-------
prediction : numpy array
"""
option_mask = 0
if output_margin:
option_mask |= 1 # depends on [control=['if'], data=[]]
if pred_leaf:
option_mask |= 2 # depends on [control=['if'], data=[]]
self._validate_features(data)
length = ctypes.c_ulong()
preds = ctypes.POINTER(ctypes.c_float)()
_check_call(_LIB.XGBoosterPredict(self.handle, data.handle, option_mask, ntree_limit, ctypes.byref(length), ctypes.byref(preds)))
preds = ctypes2numpy(preds, length.value, np.float32)
if pred_leaf:
preds = preds.astype(np.int32) # depends on [control=['if'], data=[]]
nrow = data.num_row()
if preds.size != nrow and preds.size % nrow == 0:
preds = preds.reshape(nrow, preds.size / nrow) # depends on [control=['if'], data=[]]
return preds
|
def get_cluster_config(
cluster_type,
cluster_name=None,
kafka_topology_base_path=None,
):
"""Return the cluster configuration.
Use the local cluster if cluster_name is not specified.
:param cluster_type: the type of the cluster
:type cluster_type: string
:param cluster_name: the name of the cluster
:type cluster_name: string
:param kafka_topology_base_path: base path to look for <cluster_type>.yaml
:type cluster_name: string
:returns: the cluster
:rtype: ClusterConfig
"""
if not kafka_topology_base_path:
config_dirs = get_conf_dirs()
else:
config_dirs = [kafka_topology_base_path]
topology = None
for config_dir in config_dirs:
try:
topology = TopologyConfiguration(
cluster_type,
config_dir,
)
except MissingConfigurationError:
pass
if not topology:
raise MissingConfigurationError(
"No available configuration for type {0}".format(cluster_type),
)
if cluster_name:
return topology.get_cluster_by_name(cluster_name)
else:
return topology.get_local_cluster()
|
def function[get_cluster_config, parameter[cluster_type, cluster_name, kafka_topology_base_path]]:
constant[Return the cluster configuration.
Use the local cluster if cluster_name is not specified.
:param cluster_type: the type of the cluster
:type cluster_type: string
:param cluster_name: the name of the cluster
:type cluster_name: string
:param kafka_topology_base_path: base path to look for <cluster_type>.yaml
:type cluster_name: string
:returns: the cluster
:rtype: ClusterConfig
]
if <ast.UnaryOp object at 0x7da1b0830790> begin[:]
variable[config_dirs] assign[=] call[name[get_conf_dirs], parameter[]]
variable[topology] assign[=] constant[None]
for taget[name[config_dir]] in starred[name[config_dirs]] begin[:]
<ast.Try object at 0x7da1b0831c30>
if <ast.UnaryOp object at 0x7da1b0831750> begin[:]
<ast.Raise object at 0x7da1b0833100>
if name[cluster_name] begin[:]
return[call[name[topology].get_cluster_by_name, parameter[name[cluster_name]]]]
|
keyword[def] identifier[get_cluster_config] (
identifier[cluster_type] ,
identifier[cluster_name] = keyword[None] ,
identifier[kafka_topology_base_path] = keyword[None] ,
):
literal[string]
keyword[if] keyword[not] identifier[kafka_topology_base_path] :
identifier[config_dirs] = identifier[get_conf_dirs] ()
keyword[else] :
identifier[config_dirs] =[ identifier[kafka_topology_base_path] ]
identifier[topology] = keyword[None]
keyword[for] identifier[config_dir] keyword[in] identifier[config_dirs] :
keyword[try] :
identifier[topology] = identifier[TopologyConfiguration] (
identifier[cluster_type] ,
identifier[config_dir] ,
)
keyword[except] identifier[MissingConfigurationError] :
keyword[pass]
keyword[if] keyword[not] identifier[topology] :
keyword[raise] identifier[MissingConfigurationError] (
literal[string] . identifier[format] ( identifier[cluster_type] ),
)
keyword[if] identifier[cluster_name] :
keyword[return] identifier[topology] . identifier[get_cluster_by_name] ( identifier[cluster_name] )
keyword[else] :
keyword[return] identifier[topology] . identifier[get_local_cluster] ()
|
def get_cluster_config(cluster_type, cluster_name=None, kafka_topology_base_path=None):
"""Return the cluster configuration.
Use the local cluster if cluster_name is not specified.
:param cluster_type: the type of the cluster
:type cluster_type: string
:param cluster_name: the name of the cluster
:type cluster_name: string
:param kafka_topology_base_path: base path to look for <cluster_type>.yaml
:type cluster_name: string
:returns: the cluster
:rtype: ClusterConfig
"""
if not kafka_topology_base_path:
config_dirs = get_conf_dirs() # depends on [control=['if'], data=[]]
else:
config_dirs = [kafka_topology_base_path]
topology = None
for config_dir in config_dirs:
try:
topology = TopologyConfiguration(cluster_type, config_dir) # depends on [control=['try'], data=[]]
except MissingConfigurationError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['config_dir']]
if not topology:
raise MissingConfigurationError('No available configuration for type {0}'.format(cluster_type)) # depends on [control=['if'], data=[]]
if cluster_name:
return topology.get_cluster_by_name(cluster_name) # depends on [control=['if'], data=[]]
else:
return topology.get_local_cluster()
|
def power(maf=0.5,beta=0.1, N=100, cutoff=5e-8):
"""
estimate power for a given allele frequency, effect size beta and sample size N
Assumption:
z-score = beta_ML distributed as p(0) = N(0,1.0(maf*(1-maf)*N))) under the null hypothesis
the actual beta_ML is distributed as p(alt) = N( beta , 1.0/(maf*(1-maf)N) )
Arguments:
maf: minor allele frequency of the SNP
beta: effect size of the SNP
N: sample size (number of individuals)
Returns:
power: probability to detect a SNP in that study with the given parameters
"""
"""
std(snp)=sqrt(2.0*maf*(1-maf))
power = \int
beta_ML = (snp^T*snp)^{-1}*snp^T*Y = cov(snp,Y)/var(snp)
E[beta_ML] = (snp^T*snp)^{-1}*snp^T*E[Y]
= (snp^T*snp)^{-1}*snp^T*snp * beta
= beta
Var[beta_ML]= (snp^T*snp)^{-1}*(snp^T*snp)*(snp^T*snp)^{-1}
= (snp^T*snp)^{-1}
= 1/N * var(snp)
= 1/N * maf*(1-maf)
"""
assert maf>=0.0 and maf<=0.5, "maf needs to be between 0.0 and 0.5, got %f" % maf
if beta<0.0:
beta=-beta
std_beta = 1.0/np.sqrt(N*(2.0 * maf*(1.0-maf)))
non_centrality = beta
beta_samples = np.random.normal(loc=non_centrality, scale=std_beta)
n_grid = 100000
beta_in = np.arange(0.5/(n_grid+1.0),(n_grid-0.5)/(n_grid+1.0),1.0/(n_grid+1.0))
beta_theoretical = ((st.norm.isf(beta_in)* std_beta) + non_centrality)
pvals = st.chi2.sf( (beta_theoretical/std_beta)*(beta_theoretical/std_beta) ,1.0)
power = (pvals<cutoff).mean()
return power, pvals
|
def function[power, parameter[maf, beta, N, cutoff]]:
constant[
estimate power for a given allele frequency, effect size beta and sample size N
Assumption:
z-score = beta_ML distributed as p(0) = N(0,1.0(maf*(1-maf)*N))) under the null hypothesis
the actual beta_ML is distributed as p(alt) = N( beta , 1.0/(maf*(1-maf)N) )
Arguments:
maf: minor allele frequency of the SNP
beta: effect size of the SNP
N: sample size (number of individuals)
Returns:
power: probability to detect a SNP in that study with the given parameters
]
constant[
std(snp)=sqrt(2.0*maf*(1-maf))
power = \int
beta_ML = (snp^T*snp)^{-1}*snp^T*Y = cov(snp,Y)/var(snp)
E[beta_ML] = (snp^T*snp)^{-1}*snp^T*E[Y]
= (snp^T*snp)^{-1}*snp^T*snp * beta
= beta
Var[beta_ML]= (snp^T*snp)^{-1}*(snp^T*snp)*(snp^T*snp)^{-1}
= (snp^T*snp)^{-1}
= 1/N * var(snp)
= 1/N * maf*(1-maf)
]
assert[<ast.BoolOp object at 0x7da18bc73dc0>]
if compare[name[beta] less[<] constant[0.0]] begin[:]
variable[beta] assign[=] <ast.UnaryOp object at 0x7da204963610>
variable[std_beta] assign[=] binary_operation[constant[1.0] / call[name[np].sqrt, parameter[binary_operation[name[N] * binary_operation[binary_operation[constant[2.0] * name[maf]] * binary_operation[constant[1.0] - name[maf]]]]]]]
variable[non_centrality] assign[=] name[beta]
variable[beta_samples] assign[=] call[name[np].random.normal, parameter[]]
variable[n_grid] assign[=] constant[100000]
variable[beta_in] assign[=] call[name[np].arange, parameter[binary_operation[constant[0.5] / binary_operation[name[n_grid] + constant[1.0]]], binary_operation[binary_operation[name[n_grid] - constant[0.5]] / binary_operation[name[n_grid] + constant[1.0]]], binary_operation[constant[1.0] / binary_operation[name[n_grid] + constant[1.0]]]]]
variable[beta_theoretical] assign[=] binary_operation[binary_operation[call[name[st].norm.isf, parameter[name[beta_in]]] * name[std_beta]] + name[non_centrality]]
variable[pvals] assign[=] call[name[st].chi2.sf, parameter[binary_operation[binary_operation[name[beta_theoretical] / name[std_beta]] * binary_operation[name[beta_theoretical] / name[std_beta]]], constant[1.0]]]
variable[power] assign[=] call[compare[name[pvals] less[<] name[cutoff]].mean, parameter[]]
return[tuple[[<ast.Name object at 0x7da20c9924a0>, <ast.Name object at 0x7da20c992b00>]]]
|
keyword[def] identifier[power] ( identifier[maf] = literal[int] , identifier[beta] = literal[int] , identifier[N] = literal[int] , identifier[cutoff] = literal[int] ):
literal[string]
literal[string]
keyword[assert] identifier[maf] >= literal[int] keyword[and] identifier[maf] <= literal[int] , literal[string] % identifier[maf]
keyword[if] identifier[beta] < literal[int] :
identifier[beta] =- identifier[beta]
identifier[std_beta] = literal[int] / identifier[np] . identifier[sqrt] ( identifier[N] *( literal[int] * identifier[maf] *( literal[int] - identifier[maf] )))
identifier[non_centrality] = identifier[beta]
identifier[beta_samples] = identifier[np] . identifier[random] . identifier[normal] ( identifier[loc] = identifier[non_centrality] , identifier[scale] = identifier[std_beta] )
identifier[n_grid] = literal[int]
identifier[beta_in] = identifier[np] . identifier[arange] ( literal[int] /( identifier[n_grid] + literal[int] ),( identifier[n_grid] - literal[int] )/( identifier[n_grid] + literal[int] ), literal[int] /( identifier[n_grid] + literal[int] ))
identifier[beta_theoretical] =(( identifier[st] . identifier[norm] . identifier[isf] ( identifier[beta_in] )* identifier[std_beta] )+ identifier[non_centrality] )
identifier[pvals] = identifier[st] . identifier[chi2] . identifier[sf] (( identifier[beta_theoretical] / identifier[std_beta] )*( identifier[beta_theoretical] / identifier[std_beta] ), literal[int] )
identifier[power] =( identifier[pvals] < identifier[cutoff] ). identifier[mean] ()
keyword[return] identifier[power] , identifier[pvals]
|
def power(maf=0.5, beta=0.1, N=100, cutoff=5e-08):
"""
estimate power for a given allele frequency, effect size beta and sample size N
Assumption:
z-score = beta_ML distributed as p(0) = N(0,1.0(maf*(1-maf)*N))) under the null hypothesis
the actual beta_ML is distributed as p(alt) = N( beta , 1.0/(maf*(1-maf)N) )
Arguments:
maf: minor allele frequency of the SNP
beta: effect size of the SNP
N: sample size (number of individuals)
Returns:
power: probability to detect a SNP in that study with the given parameters
"""
'\n\tstd(snp)=sqrt(2.0*maf*(1-maf)) \n\tpower = \\int \n\n\tbeta_ML = (snp^T*snp)^{-1}*snp^T*Y = cov(snp,Y)/var(snp) \n\tE[beta_ML]\t= (snp^T*snp)^{-1}*snp^T*E[Y] \n\t\t\t\t= (snp^T*snp)^{-1}*snp^T*snp * beta\n\t\t\t\t= beta\n\tVar[beta_ML]= (snp^T*snp)^{-1}*(snp^T*snp)*(snp^T*snp)^{-1}\n\t\t\t\t= (snp^T*snp)^{-1}\n\t\t\t\t= 1/N * var(snp)\n\t\t\t\t= 1/N * maf*(1-maf)\n\t'
assert maf >= 0.0 and maf <= 0.5, 'maf needs to be between 0.0 and 0.5, got %f' % maf
if beta < 0.0:
beta = -beta # depends on [control=['if'], data=['beta']]
std_beta = 1.0 / np.sqrt(N * (2.0 * maf * (1.0 - maf)))
non_centrality = beta
beta_samples = np.random.normal(loc=non_centrality, scale=std_beta)
n_grid = 100000
beta_in = np.arange(0.5 / (n_grid + 1.0), (n_grid - 0.5) / (n_grid + 1.0), 1.0 / (n_grid + 1.0))
beta_theoretical = st.norm.isf(beta_in) * std_beta + non_centrality
pvals = st.chi2.sf(beta_theoretical / std_beta * (beta_theoretical / std_beta), 1.0)
power = (pvals < cutoff).mean()
return (power, pvals)
|
def mat_to_numpy_arr(self):
''' convert list to numpy array - numpy arrays can not be saved as json '''
import numpy as np
self.dat['mat'] = np.asarray(self.dat['mat'])
|
def function[mat_to_numpy_arr, parameter[self]]:
constant[ convert list to numpy array - numpy arrays can not be saved as json ]
import module[numpy] as alias[np]
call[name[self].dat][constant[mat]] assign[=] call[name[np].asarray, parameter[call[name[self].dat][constant[mat]]]]
|
keyword[def] identifier[mat_to_numpy_arr] ( identifier[self] ):
literal[string]
keyword[import] identifier[numpy] keyword[as] identifier[np]
identifier[self] . identifier[dat] [ literal[string] ]= identifier[np] . identifier[asarray] ( identifier[self] . identifier[dat] [ literal[string] ])
|
def mat_to_numpy_arr(self):
""" convert list to numpy array - numpy arrays can not be saved as json """
import numpy as np
self.dat['mat'] = np.asarray(self.dat['mat'])
|
def read(config_file, configspec, server_mode=False, default_section='default_settings', list_values=True):
'''
Read the config file with spec validation
'''
# configspec = ConfigObj(path.join(path.abspath(path.dirname(__file__)), configspec),
# encoding='UTF8',
# interpolation='Template',
# list_values=False,
# _inspec=True)
config = ConfigObj(config_file,
configspec=path.join(path.abspath(path.dirname(__file__)),
configspec),
list_values=list_values)
validation = config.validate(validate.Validator(), preserve_errors=True)
if validation == True:
config = dict(config)
for section in config:
if section != default_section:
if server_mode: # When it's a servers config file, retrieve the correct fqdn
config[section]['availability'] = True
if config[section]['custom_fqdn'] == None:
config[section]['custom_fqdn'] = socket.getfqdn()
for option in config[section]: # retrieve default configuration for missing values
if config[section][option] == None:
config[section][option] = config[default_section][option]
del(config[default_section])
return config
else:
raise ConfiguratorException(config_file, validation)
|
def function[read, parameter[config_file, configspec, server_mode, default_section, list_values]]:
constant[
Read the config file with spec validation
]
variable[config] assign[=] call[name[ConfigObj], parameter[name[config_file]]]
variable[validation] assign[=] call[name[config].validate, parameter[call[name[validate].Validator, parameter[]]]]
if compare[name[validation] equal[==] constant[True]] begin[:]
variable[config] assign[=] call[name[dict], parameter[name[config]]]
for taget[name[section]] in starred[name[config]] begin[:]
if compare[name[section] not_equal[!=] name[default_section]] begin[:]
if name[server_mode] begin[:]
call[call[name[config]][name[section]]][constant[availability]] assign[=] constant[True]
if compare[call[call[name[config]][name[section]]][constant[custom_fqdn]] equal[==] constant[None]] begin[:]
call[call[name[config]][name[section]]][constant[custom_fqdn]] assign[=] call[name[socket].getfqdn, parameter[]]
for taget[name[option]] in starred[call[name[config]][name[section]]] begin[:]
if compare[call[call[name[config]][name[section]]][name[option]] equal[==] constant[None]] begin[:]
call[call[name[config]][name[section]]][name[option]] assign[=] call[call[name[config]][name[default_section]]][name[option]]
<ast.Delete object at 0x7da204620b50>
return[name[config]]
|
keyword[def] identifier[read] ( identifier[config_file] , identifier[configspec] , identifier[server_mode] = keyword[False] , identifier[default_section] = literal[string] , identifier[list_values] = keyword[True] ):
literal[string]
identifier[config] = identifier[ConfigObj] ( identifier[config_file] ,
identifier[configspec] = identifier[path] . identifier[join] ( identifier[path] . identifier[abspath] ( identifier[path] . identifier[dirname] ( identifier[__file__] )),
identifier[configspec] ),
identifier[list_values] = identifier[list_values] )
identifier[validation] = identifier[config] . identifier[validate] ( identifier[validate] . identifier[Validator] (), identifier[preserve_errors] = keyword[True] )
keyword[if] identifier[validation] == keyword[True] :
identifier[config] = identifier[dict] ( identifier[config] )
keyword[for] identifier[section] keyword[in] identifier[config] :
keyword[if] identifier[section] != identifier[default_section] :
keyword[if] identifier[server_mode] :
identifier[config] [ identifier[section] ][ literal[string] ]= keyword[True]
keyword[if] identifier[config] [ identifier[section] ][ literal[string] ]== keyword[None] :
identifier[config] [ identifier[section] ][ literal[string] ]= identifier[socket] . identifier[getfqdn] ()
keyword[for] identifier[option] keyword[in] identifier[config] [ identifier[section] ]:
keyword[if] identifier[config] [ identifier[section] ][ identifier[option] ]== keyword[None] :
identifier[config] [ identifier[section] ][ identifier[option] ]= identifier[config] [ identifier[default_section] ][ identifier[option] ]
keyword[del] ( identifier[config] [ identifier[default_section] ])
keyword[return] identifier[config]
keyword[else] :
keyword[raise] identifier[ConfiguratorException] ( identifier[config_file] , identifier[validation] )
|
def read(config_file, configspec, server_mode=False, default_section='default_settings', list_values=True):
"""
Read the config file with spec validation
"""
# configspec = ConfigObj(path.join(path.abspath(path.dirname(__file__)), configspec),
# encoding='UTF8',
# interpolation='Template',
# list_values=False,
# _inspec=True)
config = ConfigObj(config_file, configspec=path.join(path.abspath(path.dirname(__file__)), configspec), list_values=list_values)
validation = config.validate(validate.Validator(), preserve_errors=True)
if validation == True:
config = dict(config)
for section in config:
if section != default_section:
if server_mode: # When it's a servers config file, retrieve the correct fqdn
config[section]['availability'] = True
if config[section]['custom_fqdn'] == None:
config[section]['custom_fqdn'] = socket.getfqdn() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
for option in config[section]: # retrieve default configuration for missing values
if config[section][option] == None:
config[section][option] = config[default_section][option] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['option']] # depends on [control=['if'], data=['section', 'default_section']] # depends on [control=['for'], data=['section']]
del config[default_section]
return config # depends on [control=['if'], data=[]]
else:
raise ConfiguratorException(config_file, validation)
|
def serviceQueues(self, limit=None):
"""
Process `limit` number of messages in the inBox.
:param limit: the maximum number of messages to process
:return: the number of messages successfully processed
"""
# TODO should handle SuspiciousNode here
r = self.dequeue_pre_prepares()
r += self.inBoxRouter.handleAllSync(self.inBox, limit)
r += self.send_3pc_batch()
r += self._serviceActions()
return r
|
def function[serviceQueues, parameter[self, limit]]:
constant[
Process `limit` number of messages in the inBox.
:param limit: the maximum number of messages to process
:return: the number of messages successfully processed
]
variable[r] assign[=] call[name[self].dequeue_pre_prepares, parameter[]]
<ast.AugAssign object at 0x7da1b16c3fd0>
<ast.AugAssign object at 0x7da1b16c0820>
<ast.AugAssign object at 0x7da1b16c2c50>
return[name[r]]
|
keyword[def] identifier[serviceQueues] ( identifier[self] , identifier[limit] = keyword[None] ):
literal[string]
identifier[r] = identifier[self] . identifier[dequeue_pre_prepares] ()
identifier[r] += identifier[self] . identifier[inBoxRouter] . identifier[handleAllSync] ( identifier[self] . identifier[inBox] , identifier[limit] )
identifier[r] += identifier[self] . identifier[send_3pc_batch] ()
identifier[r] += identifier[self] . identifier[_serviceActions] ()
keyword[return] identifier[r]
|
def serviceQueues(self, limit=None):
"""
Process `limit` number of messages in the inBox.
:param limit: the maximum number of messages to process
:return: the number of messages successfully processed
"""
# TODO should handle SuspiciousNode here
r = self.dequeue_pre_prepares()
r += self.inBoxRouter.handleAllSync(self.inBox, limit)
r += self.send_3pc_batch()
r += self._serviceActions()
return r
|
def surfacemass_z(self,R,nz=7,zmax=1.,fixed_quad=True,fixed_order=8,
**kwargs):
"""
NAME:
surfacemass_z
PURPOSE:
calculate the vertically-integrated surface density
INPUT:
R - Galactocentric radius (can be Quantity)
fixed_quad= if True (default), use Gauss-Legendre integration
fixed_order= (20), order of GL integration to use
nz= number of zs to use to estimate
zmax= maximum z to use (can be Quantity)
density kwargs
OUTPUT:
\Sigma(R)
HISTORY:
2012-08-30 - Written - Bovy (IAS)
"""
if fixed_quad:
return 2.*integrate.fixed_quad(lambda x: self.density(R*numpy.ones(fixed_order),x,use_physical=False),
0.,.5,n=fixed_order)[0]
zs= numpy.linspace(0.,zmax,nz)
sf= numpy.array([self.density(R,z,use_physical=False,
**kwargs) for z in zs])
lsf= numpy.log(sf)
#Interpolate
lsfInterp= interpolate.UnivariateSpline(zs,
lsf,
k=3)
#Integrate
return 2.*integrate.quad((lambda x: numpy.exp(lsfInterp(x))),
0.,1.)[0]
|
def function[surfacemass_z, parameter[self, R, nz, zmax, fixed_quad, fixed_order]]:
constant[
NAME:
surfacemass_z
PURPOSE:
calculate the vertically-integrated surface density
INPUT:
R - Galactocentric radius (can be Quantity)
fixed_quad= if True (default), use Gauss-Legendre integration
fixed_order= (20), order of GL integration to use
nz= number of zs to use to estimate
zmax= maximum z to use (can be Quantity)
density kwargs
OUTPUT:
\Sigma(R)
HISTORY:
2012-08-30 - Written - Bovy (IAS)
]
if name[fixed_quad] begin[:]
return[binary_operation[constant[2.0] * call[call[name[integrate].fixed_quad, parameter[<ast.Lambda object at 0x7da1b0c4c1c0>, constant[0.0], constant[0.5]]]][constant[0]]]]
variable[zs] assign[=] call[name[numpy].linspace, parameter[constant[0.0], name[zmax], name[nz]]]
variable[sf] assign[=] call[name[numpy].array, parameter[<ast.ListComp object at 0x7da1b0c65cc0>]]
variable[lsf] assign[=] call[name[numpy].log, parameter[name[sf]]]
variable[lsfInterp] assign[=] call[name[interpolate].UnivariateSpline, parameter[name[zs], name[lsf]]]
return[binary_operation[constant[2.0] * call[call[name[integrate].quad, parameter[<ast.Lambda object at 0x7da1b0c64520>, constant[0.0], constant[1.0]]]][constant[0]]]]
|
keyword[def] identifier[surfacemass_z] ( identifier[self] , identifier[R] , identifier[nz] = literal[int] , identifier[zmax] = literal[int] , identifier[fixed_quad] = keyword[True] , identifier[fixed_order] = literal[int] ,
** identifier[kwargs] ):
literal[string]
keyword[if] identifier[fixed_quad] :
keyword[return] literal[int] * identifier[integrate] . identifier[fixed_quad] ( keyword[lambda] identifier[x] : identifier[self] . identifier[density] ( identifier[R] * identifier[numpy] . identifier[ones] ( identifier[fixed_order] ), identifier[x] , identifier[use_physical] = keyword[False] ),
literal[int] , literal[int] , identifier[n] = identifier[fixed_order] )[ literal[int] ]
identifier[zs] = identifier[numpy] . identifier[linspace] ( literal[int] , identifier[zmax] , identifier[nz] )
identifier[sf] = identifier[numpy] . identifier[array] ([ identifier[self] . identifier[density] ( identifier[R] , identifier[z] , identifier[use_physical] = keyword[False] ,
** identifier[kwargs] ) keyword[for] identifier[z] keyword[in] identifier[zs] ])
identifier[lsf] = identifier[numpy] . identifier[log] ( identifier[sf] )
identifier[lsfInterp] = identifier[interpolate] . identifier[UnivariateSpline] ( identifier[zs] ,
identifier[lsf] ,
identifier[k] = literal[int] )
keyword[return] literal[int] * identifier[integrate] . identifier[quad] (( keyword[lambda] identifier[x] : identifier[numpy] . identifier[exp] ( identifier[lsfInterp] ( identifier[x] ))),
literal[int] , literal[int] )[ literal[int] ]
|
def surfacemass_z(self, R, nz=7, zmax=1.0, fixed_quad=True, fixed_order=8, **kwargs):
"""
NAME:
surfacemass_z
PURPOSE:
calculate the vertically-integrated surface density
INPUT:
R - Galactocentric radius (can be Quantity)
fixed_quad= if True (default), use Gauss-Legendre integration
fixed_order= (20), order of GL integration to use
nz= number of zs to use to estimate
zmax= maximum z to use (can be Quantity)
density kwargs
OUTPUT:
\\Sigma(R)
HISTORY:
2012-08-30 - Written - Bovy (IAS)
"""
if fixed_quad:
return 2.0 * integrate.fixed_quad(lambda x: self.density(R * numpy.ones(fixed_order), x, use_physical=False), 0.0, 0.5, n=fixed_order)[0] # depends on [control=['if'], data=[]]
zs = numpy.linspace(0.0, zmax, nz)
sf = numpy.array([self.density(R, z, use_physical=False, **kwargs) for z in zs])
lsf = numpy.log(sf)
#Interpolate
lsfInterp = interpolate.UnivariateSpline(zs, lsf, k=3)
#Integrate
return 2.0 * integrate.quad(lambda x: numpy.exp(lsfInterp(x)), 0.0, 1.0)[0]
|
def plot_whiteness(var, h, repeats=1000, axis=None):
""" Draw distribution of the Portmanteu whiteness test.
Parameters
----------
var : :class:`~scot.var.VARBase`-like object
Vector autoregressive model (VAR) object whose residuals are tested for whiteness.
h : int
Maximum lag to include in the test.
repeats : int, optional
Number of surrogate estimates to draw under the null hypothesis.
axis : axis, optional
Axis to draw into. By default draws into :func:`matplotlib.pyplot.gca()`.
Returns
-------
pr : float
*p*-value of whiteness under the null hypothesis
"""
pr, q0, q = var.test_whiteness(h, repeats, True)
if axis is None:
axis = current_axis()
pdf, _, _ = axis.hist(q0, 30, normed=True, label='surrogate distribution')
axis.plot([q,q], [0,np.max(pdf)], 'r-', label='fitted model')
#df = m*m*(h-p)
#x = np.linspace(np.min(q0)*0.0, np.max(q0)*2.0, 100)
#y = sp.stats.chi2.pdf(x, df)
#hc = axis.plot(x, y, label='chi-squared distribution (df=%i)' % df)
axis.set_title('significance: p = %f'%pr)
axis.set_xlabel('Li-McLeod statistic (Q)')
axis.set_ylabel('probability')
axis.legend()
return pr
|
def function[plot_whiteness, parameter[var, h, repeats, axis]]:
constant[ Draw distribution of the Portmanteu whiteness test.
Parameters
----------
var : :class:`~scot.var.VARBase`-like object
Vector autoregressive model (VAR) object whose residuals are tested for whiteness.
h : int
Maximum lag to include in the test.
repeats : int, optional
Number of surrogate estimates to draw under the null hypothesis.
axis : axis, optional
Axis to draw into. By default draws into :func:`matplotlib.pyplot.gca()`.
Returns
-------
pr : float
*p*-value of whiteness under the null hypothesis
]
<ast.Tuple object at 0x7da1b26076d0> assign[=] call[name[var].test_whiteness, parameter[name[h], name[repeats], constant[True]]]
if compare[name[axis] is constant[None]] begin[:]
variable[axis] assign[=] call[name[current_axis], parameter[]]
<ast.Tuple object at 0x7da1b26640d0> assign[=] call[name[axis].hist, parameter[name[q0], constant[30]]]
call[name[axis].plot, parameter[list[[<ast.Name object at 0x7da1b2664430>, <ast.Name object at 0x7da1b2664460>]], list[[<ast.Constant object at 0x7da1b26644c0>, <ast.Call object at 0x7da1b26644f0>]], constant[r-]]]
call[name[axis].set_title, parameter[binary_operation[constant[significance: p = %f] <ast.Mod object at 0x7da2590d6920> name[pr]]]]
call[name[axis].set_xlabel, parameter[constant[Li-McLeod statistic (Q)]]]
call[name[axis].set_ylabel, parameter[constant[probability]]]
call[name[axis].legend, parameter[]]
return[name[pr]]
|
keyword[def] identifier[plot_whiteness] ( identifier[var] , identifier[h] , identifier[repeats] = literal[int] , identifier[axis] = keyword[None] ):
literal[string]
identifier[pr] , identifier[q0] , identifier[q] = identifier[var] . identifier[test_whiteness] ( identifier[h] , identifier[repeats] , keyword[True] )
keyword[if] identifier[axis] keyword[is] keyword[None] :
identifier[axis] = identifier[current_axis] ()
identifier[pdf] , identifier[_] , identifier[_] = identifier[axis] . identifier[hist] ( identifier[q0] , literal[int] , identifier[normed] = keyword[True] , identifier[label] = literal[string] )
identifier[axis] . identifier[plot] ([ identifier[q] , identifier[q] ],[ literal[int] , identifier[np] . identifier[max] ( identifier[pdf] )], literal[string] , identifier[label] = literal[string] )
identifier[axis] . identifier[set_title] ( literal[string] % identifier[pr] )
identifier[axis] . identifier[set_xlabel] ( literal[string] )
identifier[axis] . identifier[set_ylabel] ( literal[string] )
identifier[axis] . identifier[legend] ()
keyword[return] identifier[pr]
|
def plot_whiteness(var, h, repeats=1000, axis=None):
""" Draw distribution of the Portmanteu whiteness test.
Parameters
----------
var : :class:`~scot.var.VARBase`-like object
Vector autoregressive model (VAR) object whose residuals are tested for whiteness.
h : int
Maximum lag to include in the test.
repeats : int, optional
Number of surrogate estimates to draw under the null hypothesis.
axis : axis, optional
Axis to draw into. By default draws into :func:`matplotlib.pyplot.gca()`.
Returns
-------
pr : float
*p*-value of whiteness under the null hypothesis
"""
(pr, q0, q) = var.test_whiteness(h, repeats, True)
if axis is None:
axis = current_axis() # depends on [control=['if'], data=['axis']]
(pdf, _, _) = axis.hist(q0, 30, normed=True, label='surrogate distribution')
axis.plot([q, q], [0, np.max(pdf)], 'r-', label='fitted model')
#df = m*m*(h-p)
#x = np.linspace(np.min(q0)*0.0, np.max(q0)*2.0, 100)
#y = sp.stats.chi2.pdf(x, df)
#hc = axis.plot(x, y, label='chi-squared distribution (df=%i)' % df)
axis.set_title('significance: p = %f' % pr)
axis.set_xlabel('Li-McLeod statistic (Q)')
axis.set_ylabel('probability')
axis.legend()
return pr
|
def loop(self):
'''
Tracks the time in a loop. The estimated time to completion
can be calculated and if verbose is set to *True*, the object will print
estimated time to completion, and percent complete.
Actived in every loop to keep track'''
self.count += 1
self.tf = time.time()
self.elapsed = self.tf - self.ti
if self.verbose:
displayAll(self.elapsed, self.display_amt, self.est_end,
self.nLoops, self.count, self.numPrints)
|
def function[loop, parameter[self]]:
constant[
Tracks the time in a loop. The estimated time to completion
can be calculated and if verbose is set to *True*, the object will print
estimated time to completion, and percent complete.
Actived in every loop to keep track]
<ast.AugAssign object at 0x7da20c795d80>
name[self].tf assign[=] call[name[time].time, parameter[]]
name[self].elapsed assign[=] binary_operation[name[self].tf - name[self].ti]
if name[self].verbose begin[:]
call[name[displayAll], parameter[name[self].elapsed, name[self].display_amt, name[self].est_end, name[self].nLoops, name[self].count, name[self].numPrints]]
|
keyword[def] identifier[loop] ( identifier[self] ):
literal[string]
identifier[self] . identifier[count] += literal[int]
identifier[self] . identifier[tf] = identifier[time] . identifier[time] ()
identifier[self] . identifier[elapsed] = identifier[self] . identifier[tf] - identifier[self] . identifier[ti]
keyword[if] identifier[self] . identifier[verbose] :
identifier[displayAll] ( identifier[self] . identifier[elapsed] , identifier[self] . identifier[display_amt] , identifier[self] . identifier[est_end] ,
identifier[self] . identifier[nLoops] , identifier[self] . identifier[count] , identifier[self] . identifier[numPrints] )
|
def loop(self):
"""
Tracks the time in a loop. The estimated time to completion
can be calculated and if verbose is set to *True*, the object will print
estimated time to completion, and percent complete.
Actived in every loop to keep track"""
self.count += 1
self.tf = time.time()
self.elapsed = self.tf - self.ti
if self.verbose:
displayAll(self.elapsed, self.display_amt, self.est_end, self.nLoops, self.count, self.numPrints) # depends on [control=['if'], data=[]]
|
def radius_server_host_key(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
host = ET.SubElement(radius_server, "host")
hostname_key = ET.SubElement(host, "hostname")
hostname_key.text = kwargs.pop('hostname')
key = ET.SubElement(host, "key")
key.text = kwargs.pop('key')
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
def function[radius_server_host_key, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[radius_server] assign[=] call[name[ET].SubElement, parameter[name[config], constant[radius-server]]]
variable[host] assign[=] call[name[ET].SubElement, parameter[name[radius_server], constant[host]]]
variable[hostname_key] assign[=] call[name[ET].SubElement, parameter[name[host], constant[hostname]]]
name[hostname_key].text assign[=] call[name[kwargs].pop, parameter[constant[hostname]]]
variable[key] assign[=] call[name[ET].SubElement, parameter[name[host], constant[key]]]
name[key].text assign[=] call[name[kwargs].pop, parameter[constant[key]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]]
|
keyword[def] identifier[radius_server_host_key] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[radius_server] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[host] = identifier[ET] . identifier[SubElement] ( identifier[radius_server] , literal[string] )
identifier[hostname_key] = identifier[ET] . identifier[SubElement] ( identifier[host] , literal[string] )
identifier[hostname_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[key] = identifier[ET] . identifier[SubElement] ( identifier[host] , literal[string] )
identifier[key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] )
|
def radius_server_host_key(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
radius_server = ET.SubElement(config, 'radius-server', xmlns='urn:brocade.com:mgmt:brocade-aaa')
host = ET.SubElement(radius_server, 'host')
hostname_key = ET.SubElement(host, 'hostname')
hostname_key.text = kwargs.pop('hostname')
key = ET.SubElement(host, 'key')
key.text = kwargs.pop('key')
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
def load(self, callback=None, errback=None, reload=False):
"""
Load Scopegroup data from the API.
"""
if not reload and self.data:
raise ScopegroupException('Scope Group already loaded')
def success(result, *args):
self.data = result
self.id = result['id']
self.dhcp4 = result['dhcp4']
self.dhcp6 = result['dhcp6']
self.name = result['name']
self.service_group_id = result['service_group_id']
if callback:
return callback(self)
else:
return self
if self.id is None:
if self.dhcp4 is None or self.dhcp6 is None or self.name is None or self.service_group_id is None:
raise AddressException('Must at least specify an id or name and service_group_id')
else:
try:
self.id = [scope_group for scope_group in self._rest.list() if scope_group['name'] == self.name and
scope_group['service_group_id'] == self.service_group_id][0]['id']
except IndexError:
raise AddressException("Could not find Scope Group by name and service_group_id. It may not exist")
return self._rest.retrieve(self.id, callback=success,
errback=errback)
|
def function[load, parameter[self, callback, errback, reload]]:
constant[
Load Scopegroup data from the API.
]
if <ast.BoolOp object at 0x7da204621de0> begin[:]
<ast.Raise object at 0x7da204620af0>
def function[success, parameter[result]]:
name[self].data assign[=] name[result]
name[self].id assign[=] call[name[result]][constant[id]]
name[self].dhcp4 assign[=] call[name[result]][constant[dhcp4]]
name[self].dhcp6 assign[=] call[name[result]][constant[dhcp6]]
name[self].name assign[=] call[name[result]][constant[name]]
name[self].service_group_id assign[=] call[name[result]][constant[service_group_id]]
if name[callback] begin[:]
return[call[name[callback], parameter[name[self]]]]
if compare[name[self].id is constant[None]] begin[:]
if <ast.BoolOp object at 0x7da204622bf0> begin[:]
<ast.Raise object at 0x7da204622fb0>
return[call[name[self]._rest.retrieve, parameter[name[self].id]]]
|
keyword[def] identifier[load] ( identifier[self] , identifier[callback] = keyword[None] , identifier[errback] = keyword[None] , identifier[reload] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[reload] keyword[and] identifier[self] . identifier[data] :
keyword[raise] identifier[ScopegroupException] ( literal[string] )
keyword[def] identifier[success] ( identifier[result] ,* identifier[args] ):
identifier[self] . identifier[data] = identifier[result]
identifier[self] . identifier[id] = identifier[result] [ literal[string] ]
identifier[self] . identifier[dhcp4] = identifier[result] [ literal[string] ]
identifier[self] . identifier[dhcp6] = identifier[result] [ literal[string] ]
identifier[self] . identifier[name] = identifier[result] [ literal[string] ]
identifier[self] . identifier[service_group_id] = identifier[result] [ literal[string] ]
keyword[if] identifier[callback] :
keyword[return] identifier[callback] ( identifier[self] )
keyword[else] :
keyword[return] identifier[self]
keyword[if] identifier[self] . identifier[id] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[dhcp4] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[dhcp6] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[name] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[service_group_id] keyword[is] keyword[None] :
keyword[raise] identifier[AddressException] ( literal[string] )
keyword[else] :
keyword[try] :
identifier[self] . identifier[id] =[ identifier[scope_group] keyword[for] identifier[scope_group] keyword[in] identifier[self] . identifier[_rest] . identifier[list] () keyword[if] identifier[scope_group] [ literal[string] ]== identifier[self] . identifier[name] keyword[and]
identifier[scope_group] [ literal[string] ]== identifier[self] . identifier[service_group_id] ][ literal[int] ][ literal[string] ]
keyword[except] identifier[IndexError] :
keyword[raise] identifier[AddressException] ( literal[string] )
keyword[return] identifier[self] . identifier[_rest] . identifier[retrieve] ( identifier[self] . identifier[id] , identifier[callback] = identifier[success] ,
identifier[errback] = identifier[errback] )
|
def load(self, callback=None, errback=None, reload=False):
"""
Load Scopegroup data from the API.
"""
if not reload and self.data:
raise ScopegroupException('Scope Group already loaded') # depends on [control=['if'], data=[]]
def success(result, *args):
self.data = result
self.id = result['id']
self.dhcp4 = result['dhcp4']
self.dhcp6 = result['dhcp6']
self.name = result['name']
self.service_group_id = result['service_group_id']
if callback:
return callback(self) # depends on [control=['if'], data=[]]
else:
return self
if self.id is None:
if self.dhcp4 is None or self.dhcp6 is None or self.name is None or (self.service_group_id is None):
raise AddressException('Must at least specify an id or name and service_group_id') # depends on [control=['if'], data=[]]
else:
try:
self.id = [scope_group for scope_group in self._rest.list() if scope_group['name'] == self.name and scope_group['service_group_id'] == self.service_group_id][0]['id'] # depends on [control=['try'], data=[]]
except IndexError:
raise AddressException('Could not find Scope Group by name and service_group_id. It may not exist') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return self._rest.retrieve(self.id, callback=success, errback=errback)
|
def ids_for(self, city_name, country=None, matching='nocase'):
"""
Returns a list of tuples in the form (long, str, str) corresponding to
the int IDs and relative toponyms and 2-chars country of the cities
matching the provided city name.
The rule for identifying matchings is according to the provided
`matching` parameter value.
If `country` is provided, the search is restricted to the cities of
the specified country.
:param country: two character str representing the country where to
search for the city. Defaults to `None`, which means: search in all
countries.
:param matching: str among `exact` (literal, case-sensitive matching),
`nocase` (literal, case-insensitive matching) and `like` (matches cities
whose name contains as a substring the string fed to the function, no
matter the case). Defaults to `nocase`.
:raises ValueError if the value for `matching` is unknown
:return: list of tuples
"""
if not city_name:
return []
if matching not in self.MATCHINGS:
raise ValueError("Unknown type of matching: "
"allowed values are %s" % ", ".join(self.MATCHINGS))
if country is not None and len(country) != 2:
raise ValueError("Country must be a 2-char string")
splits = self._filter_matching_lines(city_name, country, matching)
return [(int(item[1]), item[0], item[4]) for item in splits]
|
def function[ids_for, parameter[self, city_name, country, matching]]:
constant[
Returns a list of tuples in the form (long, str, str) corresponding to
the int IDs and relative toponyms and 2-chars country of the cities
matching the provided city name.
The rule for identifying matchings is according to the provided
`matching` parameter value.
If `country` is provided, the search is restricted to the cities of
the specified country.
:param country: two character str representing the country where to
search for the city. Defaults to `None`, which means: search in all
countries.
:param matching: str among `exact` (literal, case-sensitive matching),
`nocase` (literal, case-insensitive matching) and `like` (matches cities
whose name contains as a substring the string fed to the function, no
matter the case). Defaults to `nocase`.
:raises ValueError if the value for `matching` is unknown
:return: list of tuples
]
if <ast.UnaryOp object at 0x7da18f812560> begin[:]
return[list[[]]]
if compare[name[matching] <ast.NotIn object at 0x7da2590d7190> name[self].MATCHINGS] begin[:]
<ast.Raise object at 0x7da18f813040>
if <ast.BoolOp object at 0x7da18f8107c0> begin[:]
<ast.Raise object at 0x7da18f813610>
variable[splits] assign[=] call[name[self]._filter_matching_lines, parameter[name[city_name], name[country], name[matching]]]
return[<ast.ListComp object at 0x7da18f8130d0>]
|
keyword[def] identifier[ids_for] ( identifier[self] , identifier[city_name] , identifier[country] = keyword[None] , identifier[matching] = literal[string] ):
literal[string]
keyword[if] keyword[not] identifier[city_name] :
keyword[return] []
keyword[if] identifier[matching] keyword[not] keyword[in] identifier[self] . identifier[MATCHINGS] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] % literal[string] . identifier[join] ( identifier[self] . identifier[MATCHINGS] ))
keyword[if] identifier[country] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[country] )!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[splits] = identifier[self] . identifier[_filter_matching_lines] ( identifier[city_name] , identifier[country] , identifier[matching] )
keyword[return] [( identifier[int] ( identifier[item] [ literal[int] ]), identifier[item] [ literal[int] ], identifier[item] [ literal[int] ]) keyword[for] identifier[item] keyword[in] identifier[splits] ]
|
def ids_for(self, city_name, country=None, matching='nocase'):
"""
Returns a list of tuples in the form (long, str, str) corresponding to
the int IDs and relative toponyms and 2-chars country of the cities
matching the provided city name.
The rule for identifying matchings is according to the provided
`matching` parameter value.
If `country` is provided, the search is restricted to the cities of
the specified country.
:param country: two character str representing the country where to
search for the city. Defaults to `None`, which means: search in all
countries.
:param matching: str among `exact` (literal, case-sensitive matching),
`nocase` (literal, case-insensitive matching) and `like` (matches cities
whose name contains as a substring the string fed to the function, no
matter the case). Defaults to `nocase`.
:raises ValueError if the value for `matching` is unknown
:return: list of tuples
"""
if not city_name:
return [] # depends on [control=['if'], data=[]]
if matching not in self.MATCHINGS:
raise ValueError('Unknown type of matching: allowed values are %s' % ', '.join(self.MATCHINGS)) # depends on [control=['if'], data=[]]
if country is not None and len(country) != 2:
raise ValueError('Country must be a 2-char string') # depends on [control=['if'], data=[]]
splits = self._filter_matching_lines(city_name, country, matching)
return [(int(item[1]), item[0], item[4]) for item in splits]
|
def get_element_with_id(self, id):
"""Return the element with the specified ID."""
# Should we maintain a hashmap of ids to make this more efficient? Probably overkill.
# TODO: Elements can contain nested elements (captions, footnotes, table cells, etc.)
return next((el for el in self.elements if el.id == id), None)
|
def function[get_element_with_id, parameter[self, id]]:
constant[Return the element with the specified ID.]
return[call[name[next], parameter[<ast.GeneratorExp object at 0x7da18fe90a90>, constant[None]]]]
|
keyword[def] identifier[get_element_with_id] ( identifier[self] , identifier[id] ):
literal[string]
keyword[return] identifier[next] (( identifier[el] keyword[for] identifier[el] keyword[in] identifier[self] . identifier[elements] keyword[if] identifier[el] . identifier[id] == identifier[id] ), keyword[None] )
|
def get_element_with_id(self, id):
"""Return the element with the specified ID."""
# Should we maintain a hashmap of ids to make this more efficient? Probably overkill.
# TODO: Elements can contain nested elements (captions, footnotes, table cells, etc.)
return next((el for el in self.elements if el.id == id), None)
|
def download_tabular_key_value(self, url, **kwargs):
# type: (str, Any) -> Dict
"""Download 2 column csv from url and return a dictionary of keys (first column) and values (second column)
Args:
url (str): URL to download
**kwargs:
headers (Union[int, List[int], List[str]]): Number of row(s) containing headers or list of headers
file_type (Optional[str]): Type of file. Defaults to inferring.
delimiter (Optional[str]): Delimiter used for values in each row. Defaults to inferring.
Returns:
Dict: Dictionary keys (first column) and values (second column)
"""
output_dict = dict()
for row in self.get_tabular_rows(url, **kwargs):
if len(row) < 2:
continue
output_dict[row[0]] = row[1]
return output_dict
|
def function[download_tabular_key_value, parameter[self, url]]:
constant[Download 2 column csv from url and return a dictionary of keys (first column) and values (second column)
Args:
url (str): URL to download
**kwargs:
headers (Union[int, List[int], List[str]]): Number of row(s) containing headers or list of headers
file_type (Optional[str]): Type of file. Defaults to inferring.
delimiter (Optional[str]): Delimiter used for values in each row. Defaults to inferring.
Returns:
Dict: Dictionary keys (first column) and values (second column)
]
variable[output_dict] assign[=] call[name[dict], parameter[]]
for taget[name[row]] in starred[call[name[self].get_tabular_rows, parameter[name[url]]]] begin[:]
if compare[call[name[len], parameter[name[row]]] less[<] constant[2]] begin[:]
continue
call[name[output_dict]][call[name[row]][constant[0]]] assign[=] call[name[row]][constant[1]]
return[name[output_dict]]
|
keyword[def] identifier[download_tabular_key_value] ( identifier[self] , identifier[url] ,** identifier[kwargs] ):
literal[string]
identifier[output_dict] = identifier[dict] ()
keyword[for] identifier[row] keyword[in] identifier[self] . identifier[get_tabular_rows] ( identifier[url] ,** identifier[kwargs] ):
keyword[if] identifier[len] ( identifier[row] )< literal[int] :
keyword[continue]
identifier[output_dict] [ identifier[row] [ literal[int] ]]= identifier[row] [ literal[int] ]
keyword[return] identifier[output_dict]
|
def download_tabular_key_value(self, url, **kwargs):
# type: (str, Any) -> Dict
'Download 2 column csv from url and return a dictionary of keys (first column) and values (second column)\n\n Args:\n url (str): URL to download\n **kwargs:\n headers (Union[int, List[int], List[str]]): Number of row(s) containing headers or list of headers\n file_type (Optional[str]): Type of file. Defaults to inferring.\n delimiter (Optional[str]): Delimiter used for values in each row. Defaults to inferring.\n\n Returns:\n Dict: Dictionary keys (first column) and values (second column)\n\n '
output_dict = dict()
for row in self.get_tabular_rows(url, **kwargs):
if len(row) < 2:
continue # depends on [control=['if'], data=[]]
output_dict[row[0]] = row[1] # depends on [control=['for'], data=['row']]
return output_dict
|
def check(text):
"""Check the text."""
err = "oxymorons.misc"
msg = u"'{}' is an oxymoron."
oxymorons = [
"amateur expert",
"increasingly less",
"advancing backwards?",
"alludes explicitly to",
"explicitly alludes to",
"totally obsolescent",
"completely obsolescent",
"generally always",
"usually always",
"increasingly less",
"build down",
"conspicuous absence",
"exact estimate",
"found missing",
"intense apathy",
"mandatory choice",
"nonworking mother",
"organized mess",
# "pretty ugly",
# "sure bet",
# "executive secretary",
]
return existence_check(text, oxymorons, err, msg, offset=1, join=True)
|
def function[check, parameter[text]]:
constant[Check the text.]
variable[err] assign[=] constant[oxymorons.misc]
variable[msg] assign[=] constant['{}' is an oxymoron.]
variable[oxymorons] assign[=] list[[<ast.Constant object at 0x7da1b0862680>, <ast.Constant object at 0x7da1b0862740>, <ast.Constant object at 0x7da1b0860250>, <ast.Constant object at 0x7da1b08606d0>, <ast.Constant object at 0x7da1b0863ca0>, <ast.Constant object at 0x7da1b0862dd0>, <ast.Constant object at 0x7da1b0863850>, <ast.Constant object at 0x7da1b08613f0>, <ast.Constant object at 0x7da1b0860790>, <ast.Constant object at 0x7da1b0860e80>, <ast.Constant object at 0x7da1b08631f0>, <ast.Constant object at 0x7da1b0863dc0>, <ast.Constant object at 0x7da1b0862bc0>, <ast.Constant object at 0x7da1b0863af0>, <ast.Constant object at 0x7da1b08614e0>, <ast.Constant object at 0x7da1b0863040>, <ast.Constant object at 0x7da1b0863df0>, <ast.Constant object at 0x7da1b0860d30>]]
return[call[name[existence_check], parameter[name[text], name[oxymorons], name[err], name[msg]]]]
|
keyword[def] identifier[check] ( identifier[text] ):
literal[string]
identifier[err] = literal[string]
identifier[msg] = literal[string]
identifier[oxymorons] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
]
keyword[return] identifier[existence_check] ( identifier[text] , identifier[oxymorons] , identifier[err] , identifier[msg] , identifier[offset] = literal[int] , identifier[join] = keyword[True] )
|
def check(text):
"""Check the text."""
err = 'oxymorons.misc'
msg = u"'{}' is an oxymoron."
# "pretty ugly",
# "sure bet",
# "executive secretary",
oxymorons = ['amateur expert', 'increasingly less', 'advancing backwards?', 'alludes explicitly to', 'explicitly alludes to', 'totally obsolescent', 'completely obsolescent', 'generally always', 'usually always', 'increasingly less', 'build down', 'conspicuous absence', 'exact estimate', 'found missing', 'intense apathy', 'mandatory choice', 'nonworking mother', 'organized mess']
return existence_check(text, oxymorons, err, msg, offset=1, join=True)
|
def simulate(t=1000, poly=(0.,), sinusoids=None, sigma=0, rw=0, irw=0, rrw=0):
"""Simulate a random signal with seasonal (sinusoids), linear and quadratic trend, RW, IRW, and RRW
Arguments:
t (int or list of float): number of samples or time vector, default = 1000
poly (list of float): polynomial coefficients (in decreasing "order") passed to `numpy.polyval`
i.e. poly[0]*x**(N-1) + ... + poly[N-1]
sinusoids (list of list): [[period], [amplitude, period], or [ampl., period, phase]]
>>> len(simulate(poly=(0,),rrw=1))
1000
>>> simulate(t=range(3), poly=(1,2)) # doctest: +NORMALIZE_WHITESPACE
0 2
1 3
2 4
dtype: float64
>>> all(simulate(t=50, sinusoids=((1,2,3),)) == simulate(t=range(50), sinusoids=((1,2,3),)))
True
>>> any(simulate(t=100))
False
>>> abs(simulate(sinusoids=42.42).values[1] + simulate(sinusoids=42.42).values[-1]) < 1e-10
True
>>> simulate(t=17,sinusoids=[42, 16]).min()
-42.0
>>> all((simulate(t=range(10), sinusoids=(1, 9, 4.5))+simulate(t=10, sinusoids=(1,9))).abs() < 1e-10)
True
"""
if t and isinstance(t, int):
t = np.arange(t, dtype=np.float64)
else:
t = np.array(t, dtype=np.float64)
N = len(t)
poly = poly or (0.,)
poly = listify(poly)
y = np.polyval(poly, t)
sinusoids = listify(sinusoids or [])
if any(isinstance(ATP, (int, float)) for ATP in sinusoids):
sinusoids = [sinusoids]
for ATP in sinusoids:
# default period is 1 more than the length of the simulated series (no values of the cycle are repeated)
T = (t[-1] - t[0]) * N / (N - 1.)
# default amplitude is 1 and phase is 0
A, P = 1., 0
try:
A, T, P = ATP
except (TypeError, ValueError):
try:
A, T = ATP
except (TypeError, ValueError):
# default period is 1 more than the length of the simulated series
# (no values of the cycle are repeated)
A = ATP[0]
# print(A, T, P)
# print(t[1] - t[0])
y += A * np.sin(2 * np.pi * (t - P) / T)
if sigma:
y += np.random.normal(0.0, float(sigma), N)
if rw:
y += np.random.normal(0.0, float(rw), N).cumsum()
if irw:
y += np.random.normal(0.0, float(irw), N).cumsum().cumsum()
if rrw:
y += np.random.normal(0.0, float(rrw), N).cumsum().cumsum().cumsum()
return pd.Series(y, index=t)
|
def function[simulate, parameter[t, poly, sinusoids, sigma, rw, irw, rrw]]:
constant[Simulate a random signal with seasonal (sinusoids), linear and quadratic trend, RW, IRW, and RRW
Arguments:
t (int or list of float): number of samples or time vector, default = 1000
poly (list of float): polynomial coefficients (in decreasing "order") passed to `numpy.polyval`
i.e. poly[0]*x**(N-1) + ... + poly[N-1]
sinusoids (list of list): [[period], [amplitude, period], or [ampl., period, phase]]
>>> len(simulate(poly=(0,),rrw=1))
1000
>>> simulate(t=range(3), poly=(1,2)) # doctest: +NORMALIZE_WHITESPACE
0 2
1 3
2 4
dtype: float64
>>> all(simulate(t=50, sinusoids=((1,2,3),)) == simulate(t=range(50), sinusoids=((1,2,3),)))
True
>>> any(simulate(t=100))
False
>>> abs(simulate(sinusoids=42.42).values[1] + simulate(sinusoids=42.42).values[-1]) < 1e-10
True
>>> simulate(t=17,sinusoids=[42, 16]).min()
-42.0
>>> all((simulate(t=range(10), sinusoids=(1, 9, 4.5))+simulate(t=10, sinusoids=(1,9))).abs() < 1e-10)
True
]
if <ast.BoolOp object at 0x7da20c6abc70> begin[:]
variable[t] assign[=] call[name[np].arange, parameter[name[t]]]
variable[N] assign[=] call[name[len], parameter[name[t]]]
variable[poly] assign[=] <ast.BoolOp object at 0x7da20c6a83a0>
variable[poly] assign[=] call[name[listify], parameter[name[poly]]]
variable[y] assign[=] call[name[np].polyval, parameter[name[poly], name[t]]]
variable[sinusoids] assign[=] call[name[listify], parameter[<ast.BoolOp object at 0x7da20c6ab5b0>]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da20c6a93f0>]] begin[:]
variable[sinusoids] assign[=] list[[<ast.Name object at 0x7da1b143d570>]]
for taget[name[ATP]] in starred[name[sinusoids]] begin[:]
variable[T] assign[=] binary_operation[binary_operation[binary_operation[call[name[t]][<ast.UnaryOp object at 0x7da1b143f3d0>] - call[name[t]][constant[0]]] * name[N]] / binary_operation[name[N] - constant[1.0]]]
<ast.Tuple object at 0x7da1b143cc40> assign[=] tuple[[<ast.Constant object at 0x7da1b143d1b0>, <ast.Constant object at 0x7da1b143c580>]]
<ast.Try object at 0x7da1b143f5e0>
<ast.AugAssign object at 0x7da1b143c1f0>
if name[sigma] begin[:]
<ast.AugAssign object at 0x7da1b143c490>
if name[rw] begin[:]
<ast.AugAssign object at 0x7da1b143fca0>
if name[irw] begin[:]
<ast.AugAssign object at 0x7da1b1628640>
if name[rrw] begin[:]
<ast.AugAssign object at 0x7da1b162a230>
return[call[name[pd].Series, parameter[name[y]]]]
|
keyword[def] identifier[simulate] ( identifier[t] = literal[int] , identifier[poly] =( literal[int] ,), identifier[sinusoids] = keyword[None] , identifier[sigma] = literal[int] , identifier[rw] = literal[int] , identifier[irw] = literal[int] , identifier[rrw] = literal[int] ):
literal[string]
keyword[if] identifier[t] keyword[and] identifier[isinstance] ( identifier[t] , identifier[int] ):
identifier[t] = identifier[np] . identifier[arange] ( identifier[t] , identifier[dtype] = identifier[np] . identifier[float64] )
keyword[else] :
identifier[t] = identifier[np] . identifier[array] ( identifier[t] , identifier[dtype] = identifier[np] . identifier[float64] )
identifier[N] = identifier[len] ( identifier[t] )
identifier[poly] = identifier[poly] keyword[or] ( literal[int] ,)
identifier[poly] = identifier[listify] ( identifier[poly] )
identifier[y] = identifier[np] . identifier[polyval] ( identifier[poly] , identifier[t] )
identifier[sinusoids] = identifier[listify] ( identifier[sinusoids] keyword[or] [])
keyword[if] identifier[any] ( identifier[isinstance] ( identifier[ATP] ,( identifier[int] , identifier[float] )) keyword[for] identifier[ATP] keyword[in] identifier[sinusoids] ):
identifier[sinusoids] =[ identifier[sinusoids] ]
keyword[for] identifier[ATP] keyword[in] identifier[sinusoids] :
identifier[T] =( identifier[t] [- literal[int] ]- identifier[t] [ literal[int] ])* identifier[N] /( identifier[N] - literal[int] )
identifier[A] , identifier[P] = literal[int] , literal[int]
keyword[try] :
identifier[A] , identifier[T] , identifier[P] = identifier[ATP]
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[try] :
identifier[A] , identifier[T] = identifier[ATP]
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
identifier[A] = identifier[ATP] [ literal[int] ]
identifier[y] += identifier[A] * identifier[np] . identifier[sin] ( literal[int] * identifier[np] . identifier[pi] *( identifier[t] - identifier[P] )/ identifier[T] )
keyword[if] identifier[sigma] :
identifier[y] += identifier[np] . identifier[random] . identifier[normal] ( literal[int] , identifier[float] ( identifier[sigma] ), identifier[N] )
keyword[if] identifier[rw] :
identifier[y] += identifier[np] . identifier[random] . identifier[normal] ( literal[int] , identifier[float] ( identifier[rw] ), identifier[N] ). identifier[cumsum] ()
keyword[if] identifier[irw] :
identifier[y] += identifier[np] . identifier[random] . identifier[normal] ( literal[int] , identifier[float] ( identifier[irw] ), identifier[N] ). identifier[cumsum] (). identifier[cumsum] ()
keyword[if] identifier[rrw] :
identifier[y] += identifier[np] . identifier[random] . identifier[normal] ( literal[int] , identifier[float] ( identifier[rrw] ), identifier[N] ). identifier[cumsum] (). identifier[cumsum] (). identifier[cumsum] ()
keyword[return] identifier[pd] . identifier[Series] ( identifier[y] , identifier[index] = identifier[t] )
|
def simulate(t=1000, poly=(0.0,), sinusoids=None, sigma=0, rw=0, irw=0, rrw=0):
"""Simulate a random signal with seasonal (sinusoids), linear and quadratic trend, RW, IRW, and RRW
Arguments:
t (int or list of float): number of samples or time vector, default = 1000
poly (list of float): polynomial coefficients (in decreasing "order") passed to `numpy.polyval`
i.e. poly[0]*x**(N-1) + ... + poly[N-1]
sinusoids (list of list): [[period], [amplitude, period], or [ampl., period, phase]]
>>> len(simulate(poly=(0,),rrw=1))
1000
>>> simulate(t=range(3), poly=(1,2)) # doctest: +NORMALIZE_WHITESPACE
0 2
1 3
2 4
dtype: float64
>>> all(simulate(t=50, sinusoids=((1,2,3),)) == simulate(t=range(50), sinusoids=((1,2,3),)))
True
>>> any(simulate(t=100))
False
>>> abs(simulate(sinusoids=42.42).values[1] + simulate(sinusoids=42.42).values[-1]) < 1e-10
True
>>> simulate(t=17,sinusoids=[42, 16]).min()
-42.0
>>> all((simulate(t=range(10), sinusoids=(1, 9, 4.5))+simulate(t=10, sinusoids=(1,9))).abs() < 1e-10)
True
"""
if t and isinstance(t, int):
t = np.arange(t, dtype=np.float64) # depends on [control=['if'], data=[]]
else:
t = np.array(t, dtype=np.float64)
N = len(t)
poly = poly or (0.0,)
poly = listify(poly)
y = np.polyval(poly, t)
sinusoids = listify(sinusoids or [])
if any((isinstance(ATP, (int, float)) for ATP in sinusoids)):
sinusoids = [sinusoids] # depends on [control=['if'], data=[]]
for ATP in sinusoids:
# default period is 1 more than the length of the simulated series (no values of the cycle are repeated)
T = (t[-1] - t[0]) * N / (N - 1.0)
# default amplitude is 1 and phase is 0
(A, P) = (1.0, 0)
try:
(A, T, P) = ATP # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
try:
(A, T) = ATP # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
# default period is 1 more than the length of the simulated series
# (no values of the cycle are repeated)
A = ATP[0] # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
# print(A, T, P)
# print(t[1] - t[0])
y += A * np.sin(2 * np.pi * (t - P) / T) # depends on [control=['for'], data=['ATP']]
if sigma:
y += np.random.normal(0.0, float(sigma), N) # depends on [control=['if'], data=[]]
if rw:
y += np.random.normal(0.0, float(rw), N).cumsum() # depends on [control=['if'], data=[]]
if irw:
y += np.random.normal(0.0, float(irw), N).cumsum().cumsum() # depends on [control=['if'], data=[]]
if rrw:
y += np.random.normal(0.0, float(rrw), N).cumsum().cumsum().cumsum() # depends on [control=['if'], data=[]]
return pd.Series(y, index=t)
|
def merge_cycles(self):
"""Work on this graph and remove cycles, with nodes containing concatonated lists of payloads"""
while True:
### remove any self edges
own_edges = self.get_self_edges()
if len(own_edges) > 0:
for e in own_edges: self.remove_edge(e)
c = self.find_cycle()
if not c: return
keep = c[0]
remove_list = c[1:]
for n in remove_list: self.move_edges(n,keep)
for n in remove_list: keep.payload_list += n.payload_list
for n in remove_list: self.remove_node(n)
|
def function[merge_cycles, parameter[self]]:
constant[Work on this graph and remove cycles, with nodes containing concatonated lists of payloads]
while constant[True] begin[:]
variable[own_edges] assign[=] call[name[self].get_self_edges, parameter[]]
if compare[call[name[len], parameter[name[own_edges]]] greater[>] constant[0]] begin[:]
for taget[name[e]] in starred[name[own_edges]] begin[:]
call[name[self].remove_edge, parameter[name[e]]]
variable[c] assign[=] call[name[self].find_cycle, parameter[]]
if <ast.UnaryOp object at 0x7da1b09171c0> begin[:]
return[None]
variable[keep] assign[=] call[name[c]][constant[0]]
variable[remove_list] assign[=] call[name[c]][<ast.Slice object at 0x7da1b09169b0>]
for taget[name[n]] in starred[name[remove_list]] begin[:]
call[name[self].move_edges, parameter[name[n], name[keep]]]
for taget[name[n]] in starred[name[remove_list]] begin[:]
<ast.AugAssign object at 0x7da18dc07160>
for taget[name[n]] in starred[name[remove_list]] begin[:]
call[name[self].remove_node, parameter[name[n]]]
|
keyword[def] identifier[merge_cycles] ( identifier[self] ):
literal[string]
keyword[while] keyword[True] :
identifier[own_edges] = identifier[self] . identifier[get_self_edges] ()
keyword[if] identifier[len] ( identifier[own_edges] )> literal[int] :
keyword[for] identifier[e] keyword[in] identifier[own_edges] : identifier[self] . identifier[remove_edge] ( identifier[e] )
identifier[c] = identifier[self] . identifier[find_cycle] ()
keyword[if] keyword[not] identifier[c] : keyword[return]
identifier[keep] = identifier[c] [ literal[int] ]
identifier[remove_list] = identifier[c] [ literal[int] :]
keyword[for] identifier[n] keyword[in] identifier[remove_list] : identifier[self] . identifier[move_edges] ( identifier[n] , identifier[keep] )
keyword[for] identifier[n] keyword[in] identifier[remove_list] : identifier[keep] . identifier[payload_list] += identifier[n] . identifier[payload_list]
keyword[for] identifier[n] keyword[in] identifier[remove_list] : identifier[self] . identifier[remove_node] ( identifier[n] )
|
def merge_cycles(self):
"""Work on this graph and remove cycles, with nodes containing concatonated lists of payloads"""
while True:
### remove any self edges
own_edges = self.get_self_edges()
if len(own_edges) > 0:
for e in own_edges:
self.remove_edge(e) # depends on [control=['for'], data=['e']] # depends on [control=['if'], data=[]]
c = self.find_cycle()
if not c:
return # depends on [control=['if'], data=[]]
keep = c[0]
remove_list = c[1:]
for n in remove_list:
self.move_edges(n, keep) # depends on [control=['for'], data=['n']]
for n in remove_list:
keep.payload_list += n.payload_list # depends on [control=['for'], data=['n']]
for n in remove_list:
self.remove_node(n) # depends on [control=['for'], data=['n']] # depends on [control=['while'], data=[]]
|
def dict_to_htmlrow(d):
"""
converts a dictionary to a HTML table row
"""
res = "<TR>\n"
for k, v in d.items():
if type(v) == str:
res = res + '<TD><p>' + k + ':</p></TD><TD><p>' + v + '</p></TD>'
else:
res = res + '<TD><p>' + k + ':</p></TD><TD><p>' + str(v) + '</p></TD>'
res += '</TR>\n'
return res
|
def function[dict_to_htmlrow, parameter[d]]:
constant[
converts a dictionary to a HTML table row
]
variable[res] assign[=] constant[<TR>
]
for taget[tuple[[<ast.Name object at 0x7da18f00f430>, <ast.Name object at 0x7da18f00ff70>]]] in starred[call[name[d].items, parameter[]]] begin[:]
if compare[call[name[type], parameter[name[v]]] equal[==] name[str]] begin[:]
variable[res] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[res] + constant[<TD><p>]] + name[k]] + constant[:</p></TD><TD><p>]] + name[v]] + constant[</p></TD>]]
<ast.AugAssign object at 0x7da20e954c70>
return[name[res]]
|
keyword[def] identifier[dict_to_htmlrow] ( identifier[d] ):
literal[string]
identifier[res] = literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[d] . identifier[items] ():
keyword[if] identifier[type] ( identifier[v] )== identifier[str] :
identifier[res] = identifier[res] + literal[string] + identifier[k] + literal[string] + identifier[v] + literal[string]
keyword[else] :
identifier[res] = identifier[res] + literal[string] + identifier[k] + literal[string] + identifier[str] ( identifier[v] )+ literal[string]
identifier[res] += literal[string]
keyword[return] identifier[res]
|
def dict_to_htmlrow(d):
"""
converts a dictionary to a HTML table row
"""
res = '<TR>\n'
for (k, v) in d.items():
if type(v) == str:
res = res + '<TD><p>' + k + ':</p></TD><TD><p>' + v + '</p></TD>' # depends on [control=['if'], data=[]]
else:
res = res + '<TD><p>' + k + ':</p></TD><TD><p>' + str(v) + '</p></TD>' # depends on [control=['for'], data=[]]
res += '</TR>\n'
return res
|
def translate(self, frame=0):
'''Returns a Fasta sequence, translated into amino acids. Starts translating from 'frame', where frame expected to be 0,1 or 2'''
return Fasta(self.id, ''.join([genetic_codes.codes[genetic_code].get(self.seq[x:x+3].upper(), 'X') for x in range(frame, len(self)-1-frame, 3)]))
|
def function[translate, parameter[self, frame]]:
constant[Returns a Fasta sequence, translated into amino acids. Starts translating from 'frame', where frame expected to be 0,1 or 2]
return[call[name[Fasta], parameter[name[self].id, call[constant[].join, parameter[<ast.ListComp object at 0x7da1affe5ff0>]]]]]
|
keyword[def] identifier[translate] ( identifier[self] , identifier[frame] = literal[int] ):
literal[string]
keyword[return] identifier[Fasta] ( identifier[self] . identifier[id] , literal[string] . identifier[join] ([ identifier[genetic_codes] . identifier[codes] [ identifier[genetic_code] ]. identifier[get] ( identifier[self] . identifier[seq] [ identifier[x] : identifier[x] + literal[int] ]. identifier[upper] (), literal[string] ) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[frame] , identifier[len] ( identifier[self] )- literal[int] - identifier[frame] , literal[int] )]))
|
def translate(self, frame=0):
"""Returns a Fasta sequence, translated into amino acids. Starts translating from 'frame', where frame expected to be 0,1 or 2"""
return Fasta(self.id, ''.join([genetic_codes.codes[genetic_code].get(self.seq[x:x + 3].upper(), 'X') for x in range(frame, len(self) - 1 - frame, 3)]))
|
def last(self, values, axis=0):
"""return values at last occurance of its associated key
Parameters
----------
values : array_like, [keys, ...]
values to pick the last value of per group
axis : int, optional
alternative reduction axis for values
Returns
-------
unique: ndarray, [groups]
unique keys
reduced : ndarray, [groups, ...]
value array, reduced over groups
"""
values = np.asarray(values)
return self.unique, np.take(values, self.index.sorter[self.index.stop-1], axis)
|
def function[last, parameter[self, values, axis]]:
constant[return values at last occurance of its associated key
Parameters
----------
values : array_like, [keys, ...]
values to pick the last value of per group
axis : int, optional
alternative reduction axis for values
Returns
-------
unique: ndarray, [groups]
unique keys
reduced : ndarray, [groups, ...]
value array, reduced over groups
]
variable[values] assign[=] call[name[np].asarray, parameter[name[values]]]
return[tuple[[<ast.Attribute object at 0x7da2054a4970>, <ast.Call object at 0x7da2054a5f30>]]]
|
keyword[def] identifier[last] ( identifier[self] , identifier[values] , identifier[axis] = literal[int] ):
literal[string]
identifier[values] = identifier[np] . identifier[asarray] ( identifier[values] )
keyword[return] identifier[self] . identifier[unique] , identifier[np] . identifier[take] ( identifier[values] , identifier[self] . identifier[index] . identifier[sorter] [ identifier[self] . identifier[index] . identifier[stop] - literal[int] ], identifier[axis] )
|
def last(self, values, axis=0):
"""return values at last occurance of its associated key
Parameters
----------
values : array_like, [keys, ...]
values to pick the last value of per group
axis : int, optional
alternative reduction axis for values
Returns
-------
unique: ndarray, [groups]
unique keys
reduced : ndarray, [groups, ...]
value array, reduced over groups
"""
values = np.asarray(values)
return (self.unique, np.take(values, self.index.sorter[self.index.stop - 1], axis))
|
def head(self, x, y, layers=None, aq=None):
"""Head at `x`, `y`
Returns
-------
h : array length `naq` or `len(layers)`
head in all `layers` (if not `None`), or all layers of aquifer (otherwise)
"""
if aq is None: aq = self.aq.find_aquifer_data(x, y)
rv = self.potential(x, y, aq) / aq.T
if layers is None:
return rv
else:
return rv[layers]
|
def function[head, parameter[self, x, y, layers, aq]]:
constant[Head at `x`, `y`
Returns
-------
h : array length `naq` or `len(layers)`
head in all `layers` (if not `None`), or all layers of aquifer (otherwise)
]
if compare[name[aq] is constant[None]] begin[:]
variable[aq] assign[=] call[name[self].aq.find_aquifer_data, parameter[name[x], name[y]]]
variable[rv] assign[=] binary_operation[call[name[self].potential, parameter[name[x], name[y], name[aq]]] / name[aq].T]
if compare[name[layers] is constant[None]] begin[:]
return[name[rv]]
|
keyword[def] identifier[head] ( identifier[self] , identifier[x] , identifier[y] , identifier[layers] = keyword[None] , identifier[aq] = keyword[None] ):
literal[string]
keyword[if] identifier[aq] keyword[is] keyword[None] : identifier[aq] = identifier[self] . identifier[aq] . identifier[find_aquifer_data] ( identifier[x] , identifier[y] )
identifier[rv] = identifier[self] . identifier[potential] ( identifier[x] , identifier[y] , identifier[aq] )/ identifier[aq] . identifier[T]
keyword[if] identifier[layers] keyword[is] keyword[None] :
keyword[return] identifier[rv]
keyword[else] :
keyword[return] identifier[rv] [ identifier[layers] ]
|
def head(self, x, y, layers=None, aq=None):
"""Head at `x`, `y`
Returns
-------
h : array length `naq` or `len(layers)`
head in all `layers` (if not `None`), or all layers of aquifer (otherwise)
"""
if aq is None:
aq = self.aq.find_aquifer_data(x, y) # depends on [control=['if'], data=['aq']]
rv = self.potential(x, y, aq) / aq.T
if layers is None:
return rv # depends on [control=['if'], data=[]]
else:
return rv[layers]
|
def convert_idx_to_name(self, y, lens):
"""Convert label index to name.
Args:
y (list): label index list.
lens (list): true length of y.
Returns:
y: label name list.
Examples:
>>> # assumes that id2label = {1: 'B-LOC', 2: 'I-LOC'}
>>> y = [[1, 0, 0], [1, 2, 0], [1, 1, 1]]
>>> lens = [1, 2, 3]
>>> self.convert_idx_to_name(y, lens)
[['B-LOC'], ['B-LOC', 'I-LOC'], ['B-LOC', 'B-LOC', 'B-LOC']]
"""
y = [[self.id2label[idx] for idx in row[:l]]
for row, l in zip(y, lens)]
return y
|
def function[convert_idx_to_name, parameter[self, y, lens]]:
constant[Convert label index to name.
Args:
y (list): label index list.
lens (list): true length of y.
Returns:
y: label name list.
Examples:
>>> # assumes that id2label = {1: 'B-LOC', 2: 'I-LOC'}
>>> y = [[1, 0, 0], [1, 2, 0], [1, 1, 1]]
>>> lens = [1, 2, 3]
>>> self.convert_idx_to_name(y, lens)
[['B-LOC'], ['B-LOC', 'I-LOC'], ['B-LOC', 'B-LOC', 'B-LOC']]
]
variable[y] assign[=] <ast.ListComp object at 0x7da1b07b0820>
return[name[y]]
|
keyword[def] identifier[convert_idx_to_name] ( identifier[self] , identifier[y] , identifier[lens] ):
literal[string]
identifier[y] =[[ identifier[self] . identifier[id2label] [ identifier[idx] ] keyword[for] identifier[idx] keyword[in] identifier[row] [: identifier[l] ]]
keyword[for] identifier[row] , identifier[l] keyword[in] identifier[zip] ( identifier[y] , identifier[lens] )]
keyword[return] identifier[y]
|
def convert_idx_to_name(self, y, lens):
"""Convert label index to name.
Args:
y (list): label index list.
lens (list): true length of y.
Returns:
y: label name list.
Examples:
>>> # assumes that id2label = {1: 'B-LOC', 2: 'I-LOC'}
>>> y = [[1, 0, 0], [1, 2, 0], [1, 1, 1]]
>>> lens = [1, 2, 3]
>>> self.convert_idx_to_name(y, lens)
[['B-LOC'], ['B-LOC', 'I-LOC'], ['B-LOC', 'B-LOC', 'B-LOC']]
"""
y = [[self.id2label[idx] for idx in row[:l]] for (row, l) in zip(y, lens)]
return y
|
def _generate_label_matrix(self):
"""Generate an [n,m] label matrix with entries in {0,...,k}"""
self.L = np.zeros((self.n, self.m))
self.Y = np.zeros(self.n, dtype=np.int64)
for i in range(self.n):
y = choice(self.k, p=self.p) + 1 # Note that y \in {1,...,k}
self.Y[i] = y
for j in range(self.m):
p_j = self.parent.get(j, 0)
prob_y = self.P_conditional(j, y, p_j, self.L[i, p_j], y)
prob_0 = self.P_conditional(j, 0, p_j, self.L[i, p_j], y)
p = np.ones(self.k + 1) * (1 - prob_y - prob_0) / (self.k - 1)
p[0] = prob_0
p[y] = prob_y
self.L[i, j] = choice(self.k + 1, p=p)
|
def function[_generate_label_matrix, parameter[self]]:
constant[Generate an [n,m] label matrix with entries in {0,...,k}]
name[self].L assign[=] call[name[np].zeros, parameter[tuple[[<ast.Attribute object at 0x7da1b1b47880>, <ast.Attribute object at 0x7da1b1b47190>]]]]
name[self].Y assign[=] call[name[np].zeros, parameter[name[self].n]]
for taget[name[i]] in starred[call[name[range], parameter[name[self].n]]] begin[:]
variable[y] assign[=] binary_operation[call[name[choice], parameter[name[self].k]] + constant[1]]
call[name[self].Y][name[i]] assign[=] name[y]
for taget[name[j]] in starred[call[name[range], parameter[name[self].m]]] begin[:]
variable[p_j] assign[=] call[name[self].parent.get, parameter[name[j], constant[0]]]
variable[prob_y] assign[=] call[name[self].P_conditional, parameter[name[j], name[y], name[p_j], call[name[self].L][tuple[[<ast.Name object at 0x7da1b1b45d50>, <ast.Name object at 0x7da1b1b47010>]]], name[y]]]
variable[prob_0] assign[=] call[name[self].P_conditional, parameter[name[j], constant[0], name[p_j], call[name[self].L][tuple[[<ast.Name object at 0x7da1b1b47850>, <ast.Name object at 0x7da1b1b44fa0>]]], name[y]]]
variable[p] assign[=] binary_operation[binary_operation[call[name[np].ones, parameter[binary_operation[name[self].k + constant[1]]]] * binary_operation[binary_operation[constant[1] - name[prob_y]] - name[prob_0]]] / binary_operation[name[self].k - constant[1]]]
call[name[p]][constant[0]] assign[=] name[prob_0]
call[name[p]][name[y]] assign[=] name[prob_y]
call[name[self].L][tuple[[<ast.Name object at 0x7da1b1cef490>, <ast.Name object at 0x7da1b1cefcd0>]]] assign[=] call[name[choice], parameter[binary_operation[name[self].k + constant[1]]]]
|
keyword[def] identifier[_generate_label_matrix] ( identifier[self] ):
literal[string]
identifier[self] . identifier[L] = identifier[np] . identifier[zeros] (( identifier[self] . identifier[n] , identifier[self] . identifier[m] ))
identifier[self] . identifier[Y] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[n] , identifier[dtype] = identifier[np] . identifier[int64] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[n] ):
identifier[y] = identifier[choice] ( identifier[self] . identifier[k] , identifier[p] = identifier[self] . identifier[p] )+ literal[int]
identifier[self] . identifier[Y] [ identifier[i] ]= identifier[y]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[self] . identifier[m] ):
identifier[p_j] = identifier[self] . identifier[parent] . identifier[get] ( identifier[j] , literal[int] )
identifier[prob_y] = identifier[self] . identifier[P_conditional] ( identifier[j] , identifier[y] , identifier[p_j] , identifier[self] . identifier[L] [ identifier[i] , identifier[p_j] ], identifier[y] )
identifier[prob_0] = identifier[self] . identifier[P_conditional] ( identifier[j] , literal[int] , identifier[p_j] , identifier[self] . identifier[L] [ identifier[i] , identifier[p_j] ], identifier[y] )
identifier[p] = identifier[np] . identifier[ones] ( identifier[self] . identifier[k] + literal[int] )*( literal[int] - identifier[prob_y] - identifier[prob_0] )/( identifier[self] . identifier[k] - literal[int] )
identifier[p] [ literal[int] ]= identifier[prob_0]
identifier[p] [ identifier[y] ]= identifier[prob_y]
identifier[self] . identifier[L] [ identifier[i] , identifier[j] ]= identifier[choice] ( identifier[self] . identifier[k] + literal[int] , identifier[p] = identifier[p] )
|
def _generate_label_matrix(self):
"""Generate an [n,m] label matrix with entries in {0,...,k}"""
self.L = np.zeros((self.n, self.m))
self.Y = np.zeros(self.n, dtype=np.int64)
for i in range(self.n):
y = choice(self.k, p=self.p) + 1 # Note that y \in {1,...,k}
self.Y[i] = y
for j in range(self.m):
p_j = self.parent.get(j, 0)
prob_y = self.P_conditional(j, y, p_j, self.L[i, p_j], y)
prob_0 = self.P_conditional(j, 0, p_j, self.L[i, p_j], y)
p = np.ones(self.k + 1) * (1 - prob_y - prob_0) / (self.k - 1)
p[0] = prob_0
p[y] = prob_y
self.L[i, j] = choice(self.k + 1, p=p) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
|
def addClass(self, className):
'''
addClass - append a class name to the end of the "class" attribute, if not present
@param className <str> - The name of the class to add
'''
className = stripWordsOnly(className)
if not className:
return None
if ' ' in className:
# Multiple class names passed, do one at a time
for oneClassName in className.split(' '):
self.addClass(oneClassName)
return
myClassNames = self._classNames
# Do not allow duplicates
if className in myClassNames:
return
# Regenerate "classNames" and "class" attr.
# TODO: Maybe those should be properties?
myClassNames.append(className)
return None
|
def function[addClass, parameter[self, className]]:
constant[
addClass - append a class name to the end of the "class" attribute, if not present
@param className <str> - The name of the class to add
]
variable[className] assign[=] call[name[stripWordsOnly], parameter[name[className]]]
if <ast.UnaryOp object at 0x7da1b11536d0> begin[:]
return[constant[None]]
if compare[constant[ ] in name[className]] begin[:]
for taget[name[oneClassName]] in starred[call[name[className].split, parameter[constant[ ]]]] begin[:]
call[name[self].addClass, parameter[name[oneClassName]]]
return[None]
variable[myClassNames] assign[=] name[self]._classNames
if compare[name[className] in name[myClassNames]] begin[:]
return[None]
call[name[myClassNames].append, parameter[name[className]]]
return[constant[None]]
|
keyword[def] identifier[addClass] ( identifier[self] , identifier[className] ):
literal[string]
identifier[className] = identifier[stripWordsOnly] ( identifier[className] )
keyword[if] keyword[not] identifier[className] :
keyword[return] keyword[None]
keyword[if] literal[string] keyword[in] identifier[className] :
keyword[for] identifier[oneClassName] keyword[in] identifier[className] . identifier[split] ( literal[string] ):
identifier[self] . identifier[addClass] ( identifier[oneClassName] )
keyword[return]
identifier[myClassNames] = identifier[self] . identifier[_classNames]
keyword[if] identifier[className] keyword[in] identifier[myClassNames] :
keyword[return]
identifier[myClassNames] . identifier[append] ( identifier[className] )
keyword[return] keyword[None]
|
def addClass(self, className):
"""
addClass - append a class name to the end of the "class" attribute, if not present
@param className <str> - The name of the class to add
"""
className = stripWordsOnly(className)
if not className:
return None # depends on [control=['if'], data=[]]
if ' ' in className:
# Multiple class names passed, do one at a time
for oneClassName in className.split(' '):
self.addClass(oneClassName) # depends on [control=['for'], data=['oneClassName']]
return # depends on [control=['if'], data=['className']]
myClassNames = self._classNames
# Do not allow duplicates
if className in myClassNames:
return # depends on [control=['if'], data=[]]
# Regenerate "classNames" and "class" attr.
# TODO: Maybe those should be properties?
myClassNames.append(className)
return None
|
def replace_greek(self, name):
"""Replace text representing greek letters with greek letters."""
name = name.replace('gamma-delta', 'gammadelta')
name = name.replace('interleukin-1 beta', 'interleukin-1beta')
greek_present = False
for greek_txt, uni in self.greek2uni.items():
if greek_txt in name:
greek_present = True
name = name.replace(greek_txt, "{B}".format(B=uni))
if greek_present is True:
name = unicode(name, 'utf-8') # For writing to xlsx
return name
|
def function[replace_greek, parameter[self, name]]:
constant[Replace text representing greek letters with greek letters.]
variable[name] assign[=] call[name[name].replace, parameter[constant[gamma-delta], constant[gammadelta]]]
variable[name] assign[=] call[name[name].replace, parameter[constant[interleukin-1 beta], constant[interleukin-1beta]]]
variable[greek_present] assign[=] constant[False]
for taget[tuple[[<ast.Name object at 0x7da20e9b0520>, <ast.Name object at 0x7da20e9b1c90>]]] in starred[call[name[self].greek2uni.items, parameter[]]] begin[:]
if compare[name[greek_txt] in name[name]] begin[:]
variable[greek_present] assign[=] constant[True]
variable[name] assign[=] call[name[name].replace, parameter[name[greek_txt], call[constant[{B}].format, parameter[]]]]
if compare[name[greek_present] is constant[True]] begin[:]
variable[name] assign[=] call[name[unicode], parameter[name[name], constant[utf-8]]]
return[name[name]]
|
keyword[def] identifier[replace_greek] ( identifier[self] , identifier[name] ):
literal[string]
identifier[name] = identifier[name] . identifier[replace] ( literal[string] , literal[string] )
identifier[name] = identifier[name] . identifier[replace] ( literal[string] , literal[string] )
identifier[greek_present] = keyword[False]
keyword[for] identifier[greek_txt] , identifier[uni] keyword[in] identifier[self] . identifier[greek2uni] . identifier[items] ():
keyword[if] identifier[greek_txt] keyword[in] identifier[name] :
identifier[greek_present] = keyword[True]
identifier[name] = identifier[name] . identifier[replace] ( identifier[greek_txt] , literal[string] . identifier[format] ( identifier[B] = identifier[uni] ))
keyword[if] identifier[greek_present] keyword[is] keyword[True] :
identifier[name] = identifier[unicode] ( identifier[name] , literal[string] )
keyword[return] identifier[name]
|
def replace_greek(self, name):
"""Replace text representing greek letters with greek letters."""
name = name.replace('gamma-delta', 'gammadelta')
name = name.replace('interleukin-1 beta', 'interleukin-1beta')
greek_present = False
for (greek_txt, uni) in self.greek2uni.items():
if greek_txt in name:
greek_present = True
name = name.replace(greek_txt, '{B}'.format(B=uni)) # depends on [control=['if'], data=['greek_txt', 'name']] # depends on [control=['for'], data=[]]
if greek_present is True:
name = unicode(name, 'utf-8') # For writing to xlsx # depends on [control=['if'], data=[]]
return name
|
async def sinterstore(self, dest, keys, *args):
"""
Store the intersection of sets specified by ``keys`` into a new
set named ``dest``. Returns the number of keys in the new set.
"""
args = list_or_args(keys, args)
return await self.execute_command('SINTERSTORE', dest, *args)
|
<ast.AsyncFunctionDef object at 0x7da1b077b850>
|
keyword[async] keyword[def] identifier[sinterstore] ( identifier[self] , identifier[dest] , identifier[keys] ,* identifier[args] ):
literal[string]
identifier[args] = identifier[list_or_args] ( identifier[keys] , identifier[args] )
keyword[return] keyword[await] identifier[self] . identifier[execute_command] ( literal[string] , identifier[dest] ,* identifier[args] )
|
async def sinterstore(self, dest, keys, *args):
"""
Store the intersection of sets specified by ``keys`` into a new
set named ``dest``. Returns the number of keys in the new set.
"""
args = list_or_args(keys, args)
return await self.execute_command('SINTERSTORE', dest, *args)
|
def popen(self, args, **kwargs):
"""
creates a subprocess with passed args
"""
self.log.debug("popen %s", ' '.join(args))
return vaping.io.subprocess.Popen(args, **kwargs)
|
def function[popen, parameter[self, args]]:
constant[
creates a subprocess with passed args
]
call[name[self].log.debug, parameter[constant[popen %s], call[constant[ ].join, parameter[name[args]]]]]
return[call[name[vaping].io.subprocess.Popen, parameter[name[args]]]]
|
keyword[def] identifier[popen] ( identifier[self] , identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[log] . identifier[debug] ( literal[string] , literal[string] . identifier[join] ( identifier[args] ))
keyword[return] identifier[vaping] . identifier[io] . identifier[subprocess] . identifier[Popen] ( identifier[args] ,** identifier[kwargs] )
|
def popen(self, args, **kwargs):
"""
creates a subprocess with passed args
"""
self.log.debug('popen %s', ' '.join(args))
return vaping.io.subprocess.Popen(args, **kwargs)
|
def relation(self, rel):
"""Process each relation."""
rel_type = rel.tags.get('type')
if any([rel.deleted,
not rel.visible,
not self.is_new_version(rel),
rel_type not in ['route', 'public_transport']]):
return
route_tag = rel.tags.get('route')
if rel_type == 'route' and route_tag not in self.transit_route_types:
return
public_transport = rel.tags.get('public_transport')
if rel_type == 'public_transport' and public_transport != 'stop_area':
return
self.relations[rel.id] = \
Relation(rel.id, {
'type': rel_type,
'public_transport': public_transport,
'route': route_tag,
'operator': rel.tags.get('operator'),
'color': rel.tags.get('color'),
'ref': rel.tags.get('ref'),
'from': rel.tags.get('from'),
'to': rel.tags.get('to'),
'name': rel.tags.get('name'),
'alt_name': rel.tags.get('alt_name'),
'url': rel.tags.get('url'),
'contact_website': rel.tags.get('contact:website')},
[(member.type, member.ref, member.role) for member in rel.members])
self.versions[rel.id] = rel.version
|
def function[relation, parameter[self, rel]]:
constant[Process each relation.]
variable[rel_type] assign[=] call[name[rel].tags.get, parameter[constant[type]]]
if call[name[any], parameter[list[[<ast.Attribute object at 0x7da1b038b340>, <ast.UnaryOp object at 0x7da1b03883d0>, <ast.UnaryOp object at 0x7da1b0388130>, <ast.Compare object at 0x7da1b03897e0>]]]] begin[:]
return[None]
variable[route_tag] assign[=] call[name[rel].tags.get, parameter[constant[route]]]
if <ast.BoolOp object at 0x7da1b038a860> begin[:]
return[None]
variable[public_transport] assign[=] call[name[rel].tags.get, parameter[constant[public_transport]]]
if <ast.BoolOp object at 0x7da1b0389e40> begin[:]
return[None]
call[name[self].relations][name[rel].id] assign[=] call[name[Relation], parameter[name[rel].id, dictionary[[<ast.Constant object at 0x7da18f7204c0>, <ast.Constant object at 0x7da18f720cd0>, <ast.Constant object at 0x7da18f721960>, <ast.Constant object at 0x7da18f721270>, <ast.Constant object at 0x7da18f7221a0>, <ast.Constant object at 0x7da18f720b50>, <ast.Constant object at 0x7da18f7212a0>, <ast.Constant object at 0x7da18f7212d0>, <ast.Constant object at 0x7da18f723160>, <ast.Constant object at 0x7da18f723730>, <ast.Constant object at 0x7da18f7214e0>, <ast.Constant object at 0x7da18f723a60>], [<ast.Name object at 0x7da18f722440>, <ast.Name object at 0x7da18f722c80>, <ast.Name object at 0x7da18f721c00>, <ast.Call object at 0x7da18f7232b0>, <ast.Call object at 0x7da18f723820>, <ast.Call object at 0x7da18f7202e0>, <ast.Call object at 0x7da18f723f40>, <ast.Call object at 0x7da18f722cb0>, <ast.Call object at 0x7da18f721ed0>, <ast.Call object at 0x7da18f7220b0>, <ast.Call object at 0x7da18f721810>, <ast.Call object at 0x7da18f722ef0>]], <ast.ListComp object at 0x7da18f721450>]]
call[name[self].versions][name[rel].id] assign[=] name[rel].version
|
keyword[def] identifier[relation] ( identifier[self] , identifier[rel] ):
literal[string]
identifier[rel_type] = identifier[rel] . identifier[tags] . identifier[get] ( literal[string] )
keyword[if] identifier[any] ([ identifier[rel] . identifier[deleted] ,
keyword[not] identifier[rel] . identifier[visible] ,
keyword[not] identifier[self] . identifier[is_new_version] ( identifier[rel] ),
identifier[rel_type] keyword[not] keyword[in] [ literal[string] , literal[string] ]]):
keyword[return]
identifier[route_tag] = identifier[rel] . identifier[tags] . identifier[get] ( literal[string] )
keyword[if] identifier[rel_type] == literal[string] keyword[and] identifier[route_tag] keyword[not] keyword[in] identifier[self] . identifier[transit_route_types] :
keyword[return]
identifier[public_transport] = identifier[rel] . identifier[tags] . identifier[get] ( literal[string] )
keyword[if] identifier[rel_type] == literal[string] keyword[and] identifier[public_transport] != literal[string] :
keyword[return]
identifier[self] . identifier[relations] [ identifier[rel] . identifier[id] ]= identifier[Relation] ( identifier[rel] . identifier[id] ,{
literal[string] : identifier[rel_type] ,
literal[string] : identifier[public_transport] ,
literal[string] : identifier[route_tag] ,
literal[string] : identifier[rel] . identifier[tags] . identifier[get] ( literal[string] ),
literal[string] : identifier[rel] . identifier[tags] . identifier[get] ( literal[string] ),
literal[string] : identifier[rel] . identifier[tags] . identifier[get] ( literal[string] ),
literal[string] : identifier[rel] . identifier[tags] . identifier[get] ( literal[string] ),
literal[string] : identifier[rel] . identifier[tags] . identifier[get] ( literal[string] ),
literal[string] : identifier[rel] . identifier[tags] . identifier[get] ( literal[string] ),
literal[string] : identifier[rel] . identifier[tags] . identifier[get] ( literal[string] ),
literal[string] : identifier[rel] . identifier[tags] . identifier[get] ( literal[string] ),
literal[string] : identifier[rel] . identifier[tags] . identifier[get] ( literal[string] )},
[( identifier[member] . identifier[type] , identifier[member] . identifier[ref] , identifier[member] . identifier[role] ) keyword[for] identifier[member] keyword[in] identifier[rel] . identifier[members] ])
identifier[self] . identifier[versions] [ identifier[rel] . identifier[id] ]= identifier[rel] . identifier[version]
|
def relation(self, rel):
"""Process each relation."""
rel_type = rel.tags.get('type')
if any([rel.deleted, not rel.visible, not self.is_new_version(rel), rel_type not in ['route', 'public_transport']]):
return # depends on [control=['if'], data=[]]
route_tag = rel.tags.get('route')
if rel_type == 'route' and route_tag not in self.transit_route_types:
return # depends on [control=['if'], data=[]]
public_transport = rel.tags.get('public_transport')
if rel_type == 'public_transport' and public_transport != 'stop_area':
return # depends on [control=['if'], data=[]]
self.relations[rel.id] = Relation(rel.id, {'type': rel_type, 'public_transport': public_transport, 'route': route_tag, 'operator': rel.tags.get('operator'), 'color': rel.tags.get('color'), 'ref': rel.tags.get('ref'), 'from': rel.tags.get('from'), 'to': rel.tags.get('to'), 'name': rel.tags.get('name'), 'alt_name': rel.tags.get('alt_name'), 'url': rel.tags.get('url'), 'contact_website': rel.tags.get('contact:website')}, [(member.type, member.ref, member.role) for member in rel.members])
self.versions[rel.id] = rel.version
|
def nn_y(self, y, dims=None, k = 1, radius=np.inf, eps=0.0, p=2):
"""Find the k nearest neighbors of y in the observed output data
@see Databag.nn() for argument description
@return distance and indexes of found nearest neighbors.
"""
if dims is None:
assert len(y) == self.dim_y
k_y = min(k, self.__len__())
return self._nn(DATA_Y, y, k=k_y, radius=radius, eps=eps, p=p)
else:
return self.nn_y_sub(y, dims, k, radius, eps, p)
|
def function[nn_y, parameter[self, y, dims, k, radius, eps, p]]:
constant[Find the k nearest neighbors of y in the observed output data
@see Databag.nn() for argument description
@return distance and indexes of found nearest neighbors.
]
if compare[name[dims] is constant[None]] begin[:]
assert[compare[call[name[len], parameter[name[y]]] equal[==] name[self].dim_y]]
variable[k_y] assign[=] call[name[min], parameter[name[k], call[name[self].__len__, parameter[]]]]
return[call[name[self]._nn, parameter[name[DATA_Y], name[y]]]]
|
keyword[def] identifier[nn_y] ( identifier[self] , identifier[y] , identifier[dims] = keyword[None] , identifier[k] = literal[int] , identifier[radius] = identifier[np] . identifier[inf] , identifier[eps] = literal[int] , identifier[p] = literal[int] ):
literal[string]
keyword[if] identifier[dims] keyword[is] keyword[None] :
keyword[assert] identifier[len] ( identifier[y] )== identifier[self] . identifier[dim_y]
identifier[k_y] = identifier[min] ( identifier[k] , identifier[self] . identifier[__len__] ())
keyword[return] identifier[self] . identifier[_nn] ( identifier[DATA_Y] , identifier[y] , identifier[k] = identifier[k_y] , identifier[radius] = identifier[radius] , identifier[eps] = identifier[eps] , identifier[p] = identifier[p] )
keyword[else] :
keyword[return] identifier[self] . identifier[nn_y_sub] ( identifier[y] , identifier[dims] , identifier[k] , identifier[radius] , identifier[eps] , identifier[p] )
|
def nn_y(self, y, dims=None, k=1, radius=np.inf, eps=0.0, p=2):
"""Find the k nearest neighbors of y in the observed output data
@see Databag.nn() for argument description
@return distance and indexes of found nearest neighbors.
"""
if dims is None:
assert len(y) == self.dim_y
k_y = min(k, self.__len__())
return self._nn(DATA_Y, y, k=k_y, radius=radius, eps=eps, p=p) # depends on [control=['if'], data=[]]
else:
return self.nn_y_sub(y, dims, k, radius, eps, p)
|
def tiles_from_bounds(self, bounds, zoom):
"""
Return all tiles intersecting with bounds.
Bounds values will be cleaned if they cross the antimeridian or are
outside of the Northern or Southern tile pyramid bounds.
Parameters
----------
bounds : tuple
(left, bottom, right, top) bounding values in tile pyramid CRS
zoom : integer
zoom level
Yields
------
intersecting tiles : generator
generates ``BufferedTiles``
"""
for tile in self.tiles_from_bbox(box(*bounds), zoom):
yield self.tile(*tile.id)
|
def function[tiles_from_bounds, parameter[self, bounds, zoom]]:
constant[
Return all tiles intersecting with bounds.
Bounds values will be cleaned if they cross the antimeridian or are
outside of the Northern or Southern tile pyramid bounds.
Parameters
----------
bounds : tuple
(left, bottom, right, top) bounding values in tile pyramid CRS
zoom : integer
zoom level
Yields
------
intersecting tiles : generator
generates ``BufferedTiles``
]
for taget[name[tile]] in starred[call[name[self].tiles_from_bbox, parameter[call[name[box], parameter[<ast.Starred object at 0x7da20c991210>]], name[zoom]]]] begin[:]
<ast.Yield object at 0x7da20c991870>
|
keyword[def] identifier[tiles_from_bounds] ( identifier[self] , identifier[bounds] , identifier[zoom] ):
literal[string]
keyword[for] identifier[tile] keyword[in] identifier[self] . identifier[tiles_from_bbox] ( identifier[box] (* identifier[bounds] ), identifier[zoom] ):
keyword[yield] identifier[self] . identifier[tile] (* identifier[tile] . identifier[id] )
|
def tiles_from_bounds(self, bounds, zoom):
"""
Return all tiles intersecting with bounds.
Bounds values will be cleaned if they cross the antimeridian or are
outside of the Northern or Southern tile pyramid bounds.
Parameters
----------
bounds : tuple
(left, bottom, right, top) bounding values in tile pyramid CRS
zoom : integer
zoom level
Yields
------
intersecting tiles : generator
generates ``BufferedTiles``
"""
for tile in self.tiles_from_bbox(box(*bounds), zoom):
yield self.tile(*tile.id) # depends on [control=['for'], data=['tile']]
|
def activate_membercard(self, membership_number, code, **kwargs):
"""
激活会员卡 - 接口激活方式
详情请参见
https://mp.weixin.qq.com/wiki?t=resource/res_main&id=mp1451025283
参数示例:
{
"init_bonus": 100,
"init_bonus_record":"旧积分同步",
"init_balance": 200,
"membership_number": "AAA00000001",
"code": "12312313",
"card_id": "xxxx_card_id",
"background_pic_url": "https://mmbiz.qlogo.cn/mmbiz/0?wx_fmt=jpeg",
"init_custom_field_value1": "xxxxx",
"init_custom_field_value2": "xxxxx",
"init_custom_field_value3": "xxxxx"
}
返回示例:
{"errcode":0, "errmsg":"ok"}
:param membership_number: 必填,会员卡编号,由开发者填入,作为序列号显示在用户的卡包里。可与Code码保持等值
:param code: 必填,领取会员卡用户获得的code
:param kwargs: 其他非必填字段,包含则更新对应字段。详情参见微信文档 “6 激活会员卡” 部分
:return: 参见返回示例
"""
kwargs['membership_number'] = membership_number
kwargs['code'] = code
return self._post(
'card/membercard/activate',
data=kwargs
)
|
def function[activate_membercard, parameter[self, membership_number, code]]:
constant[
激活会员卡 - 接口激活方式
详情请参见
https://mp.weixin.qq.com/wiki?t=resource/res_main&id=mp1451025283
参数示例:
{
"init_bonus": 100,
"init_bonus_record":"旧积分同步",
"init_balance": 200,
"membership_number": "AAA00000001",
"code": "12312313",
"card_id": "xxxx_card_id",
"background_pic_url": "https://mmbiz.qlogo.cn/mmbiz/0?wx_fmt=jpeg",
"init_custom_field_value1": "xxxxx",
"init_custom_field_value2": "xxxxx",
"init_custom_field_value3": "xxxxx"
}
返回示例:
{"errcode":0, "errmsg":"ok"}
:param membership_number: 必填,会员卡编号,由开发者填入,作为序列号显示在用户的卡包里。可与Code码保持等值
:param code: 必填,领取会员卡用户获得的code
:param kwargs: 其他非必填字段,包含则更新对应字段。详情参见微信文档 “6 激活会员卡” 部分
:return: 参见返回示例
]
call[name[kwargs]][constant[membership_number]] assign[=] name[membership_number]
call[name[kwargs]][constant[code]] assign[=] name[code]
return[call[name[self]._post, parameter[constant[card/membercard/activate]]]]
|
keyword[def] identifier[activate_membercard] ( identifier[self] , identifier[membership_number] , identifier[code] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= identifier[membership_number]
identifier[kwargs] [ literal[string] ]= identifier[code]
keyword[return] identifier[self] . identifier[_post] (
literal[string] ,
identifier[data] = identifier[kwargs]
)
|
def activate_membercard(self, membership_number, code, **kwargs):
"""
激活会员卡 - 接口激活方式
详情请参见
https://mp.weixin.qq.com/wiki?t=resource/res_main&id=mp1451025283
参数示例:
{
"init_bonus": 100,
"init_bonus_record":"旧积分同步",
"init_balance": 200,
"membership_number": "AAA00000001",
"code": "12312313",
"card_id": "xxxx_card_id",
"background_pic_url": "https://mmbiz.qlogo.cn/mmbiz/0?wx_fmt=jpeg",
"init_custom_field_value1": "xxxxx",
"init_custom_field_value2": "xxxxx",
"init_custom_field_value3": "xxxxx"
}
返回示例:
{"errcode":0, "errmsg":"ok"}
:param membership_number: 必填,会员卡编号,由开发者填入,作为序列号显示在用户的卡包里。可与Code码保持等值
:param code: 必填,领取会员卡用户获得的code
:param kwargs: 其他非必填字段,包含则更新对应字段。详情参见微信文档 “6 激活会员卡” 部分
:return: 参见返回示例
"""
kwargs['membership_number'] = membership_number
kwargs['code'] = code
return self._post('card/membercard/activate', data=kwargs)
|
def reset_stats_history(self):
"""Reset the stats history (dict of GlancesAttribute)."""
if self.history_enable():
reset_list = [a['name'] for a in self.get_items_history_list()]
logger.debug("Reset history for plugin {} (items: {})".format(self.plugin_name, reset_list))
self.stats_history.reset()
|
def function[reset_stats_history, parameter[self]]:
constant[Reset the stats history (dict of GlancesAttribute).]
if call[name[self].history_enable, parameter[]] begin[:]
variable[reset_list] assign[=] <ast.ListComp object at 0x7da2044c02b0>
call[name[logger].debug, parameter[call[constant[Reset history for plugin {} (items: {})].format, parameter[name[self].plugin_name, name[reset_list]]]]]
call[name[self].stats_history.reset, parameter[]]
|
keyword[def] identifier[reset_stats_history] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[history_enable] ():
identifier[reset_list] =[ identifier[a] [ literal[string] ] keyword[for] identifier[a] keyword[in] identifier[self] . identifier[get_items_history_list] ()]
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[plugin_name] , identifier[reset_list] ))
identifier[self] . identifier[stats_history] . identifier[reset] ()
|
def reset_stats_history(self):
"""Reset the stats history (dict of GlancesAttribute)."""
if self.history_enable():
reset_list = [a['name'] for a in self.get_items_history_list()]
logger.debug('Reset history for plugin {} (items: {})'.format(self.plugin_name, reset_list))
self.stats_history.reset() # depends on [control=['if'], data=[]]
|
def get_remote_content_len(self, remote, headers=None):
"""
:param remote:
:return: size of remote file
"""
if headers is None:
headers = self._get_default_request_headers()
req = urllib.request.Request(remote, headers=headers)
try:
response = urllib.request.urlopen(req)
resp_header = response.info()
byte_size = resp_header.get('Content-length')
except OSError as err:
byte_size = None
LOG.error(err)
return byte_size
|
def function[get_remote_content_len, parameter[self, remote, headers]]:
constant[
:param remote:
:return: size of remote file
]
if compare[name[headers] is constant[None]] begin[:]
variable[headers] assign[=] call[name[self]._get_default_request_headers, parameter[]]
variable[req] assign[=] call[name[urllib].request.Request, parameter[name[remote]]]
<ast.Try object at 0x7da20e9545e0>
return[name[byte_size]]
|
keyword[def] identifier[get_remote_content_len] ( identifier[self] , identifier[remote] , identifier[headers] = keyword[None] ):
literal[string]
keyword[if] identifier[headers] keyword[is] keyword[None] :
identifier[headers] = identifier[self] . identifier[_get_default_request_headers] ()
identifier[req] = identifier[urllib] . identifier[request] . identifier[Request] ( identifier[remote] , identifier[headers] = identifier[headers] )
keyword[try] :
identifier[response] = identifier[urllib] . identifier[request] . identifier[urlopen] ( identifier[req] )
identifier[resp_header] = identifier[response] . identifier[info] ()
identifier[byte_size] = identifier[resp_header] . identifier[get] ( literal[string] )
keyword[except] identifier[OSError] keyword[as] identifier[err] :
identifier[byte_size] = keyword[None]
identifier[LOG] . identifier[error] ( identifier[err] )
keyword[return] identifier[byte_size]
|
def get_remote_content_len(self, remote, headers=None):
"""
:param remote:
:return: size of remote file
"""
if headers is None:
headers = self._get_default_request_headers() # depends on [control=['if'], data=['headers']]
req = urllib.request.Request(remote, headers=headers)
try:
response = urllib.request.urlopen(req)
resp_header = response.info()
byte_size = resp_header.get('Content-length') # depends on [control=['try'], data=[]]
except OSError as err:
byte_size = None
LOG.error(err) # depends on [control=['except'], data=['err']]
return byte_size
|
def get_pub_id(title, authors, year):
"construct publication id"
if len(title.split(' ')) > 1 \
and title.split(' ')[0].lower() in ['the', 'a']:
_first_word = title.split(' ')[1].split('_')[0]
else:
_first_word = title.split(' ')[0].split('_')[0]
pub_id = authors[0].split(',')[0].split(' ')[0] + \
_first_word + \
str(year)
return pub_id
|
def function[get_pub_id, parameter[title, authors, year]]:
constant[construct publication id]
if <ast.BoolOp object at 0x7da18dc06950> begin[:]
variable[_first_word] assign[=] call[call[call[call[name[title].split, parameter[constant[ ]]]][constant[1]].split, parameter[constant[_]]]][constant[0]]
variable[pub_id] assign[=] binary_operation[binary_operation[call[call[call[call[call[name[authors]][constant[0]].split, parameter[constant[,]]]][constant[0]].split, parameter[constant[ ]]]][constant[0]] + name[_first_word]] + call[name[str], parameter[name[year]]]]
return[name[pub_id]]
|
keyword[def] identifier[get_pub_id] ( identifier[title] , identifier[authors] , identifier[year] ):
literal[string]
keyword[if] identifier[len] ( identifier[title] . identifier[split] ( literal[string] ))> literal[int] keyword[and] identifier[title] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[lower] () keyword[in] [ literal[string] , literal[string] ]:
identifier[_first_word] = identifier[title] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]
keyword[else] :
identifier[_first_word] = identifier[title] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]
identifier[pub_id] = identifier[authors] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]+ identifier[_first_word] + identifier[str] ( identifier[year] )
keyword[return] identifier[pub_id]
|
def get_pub_id(title, authors, year):
"""construct publication id"""
if len(title.split(' ')) > 1 and title.split(' ')[0].lower() in ['the', 'a']:
_first_word = title.split(' ')[1].split('_')[0] # depends on [control=['if'], data=[]]
else:
_first_word = title.split(' ')[0].split('_')[0]
pub_id = authors[0].split(',')[0].split(' ')[0] + _first_word + str(year)
return pub_id
|
def remove_callback(self, handle):
"""Remove a callback."""
if self._poll is None:
raise RuntimeError('poll instance is closed')
remove_callback(self, handle)
if handle.extra & READABLE:
self._readers -= 1
if handle.extra & WRITABLE:
self._writers -= 1
self._sync()
|
def function[remove_callback, parameter[self, handle]]:
constant[Remove a callback.]
if compare[name[self]._poll is constant[None]] begin[:]
<ast.Raise object at 0x7da1b02461d0>
call[name[remove_callback], parameter[name[self], name[handle]]]
if binary_operation[name[handle].extra <ast.BitAnd object at 0x7da2590d6b60> name[READABLE]] begin[:]
<ast.AugAssign object at 0x7da1b02443a0>
if binary_operation[name[handle].extra <ast.BitAnd object at 0x7da2590d6b60> name[WRITABLE]] begin[:]
<ast.AugAssign object at 0x7da1b0246860>
call[name[self]._sync, parameter[]]
|
keyword[def] identifier[remove_callback] ( identifier[self] , identifier[handle] ):
literal[string]
keyword[if] identifier[self] . identifier[_poll] keyword[is] keyword[None] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[remove_callback] ( identifier[self] , identifier[handle] )
keyword[if] identifier[handle] . identifier[extra] & identifier[READABLE] :
identifier[self] . identifier[_readers] -= literal[int]
keyword[if] identifier[handle] . identifier[extra] & identifier[WRITABLE] :
identifier[self] . identifier[_writers] -= literal[int]
identifier[self] . identifier[_sync] ()
|
def remove_callback(self, handle):
"""Remove a callback."""
if self._poll is None:
raise RuntimeError('poll instance is closed') # depends on [control=['if'], data=[]]
remove_callback(self, handle)
if handle.extra & READABLE:
self._readers -= 1 # depends on [control=['if'], data=[]]
if handle.extra & WRITABLE:
self._writers -= 1 # depends on [control=['if'], data=[]]
self._sync()
|
def poke(self, context):
"""
Execute the bash command in a temporary directory
which will be cleaned afterwards
"""
bash_command = self.bash_command
self.log.info("Tmp dir root location: \n %s", gettempdir())
with TemporaryDirectory(prefix='airflowtmp') as tmp_dir:
with NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f:
f.write(bytes(bash_command, 'utf_8'))
f.flush()
fname = f.name
script_location = tmp_dir + "/" + fname
self.log.info("Temporary script location: %s", script_location)
self.log.info("Running command: %s", bash_command)
sp = Popen(
['bash', fname],
stdout=PIPE, stderr=STDOUT,
close_fds=True, cwd=tmp_dir,
env=self.env, preexec_fn=os.setsid)
self.sp = sp
self.log.info("Output:")
line = ''
for line in iter(sp.stdout.readline, b''):
line = line.decode(self.output_encoding).strip()
self.log.info(line)
sp.wait()
self.log.info("Command exited with return code %s", sp.returncode)
return not sp.returncode
|
def function[poke, parameter[self, context]]:
constant[
Execute the bash command in a temporary directory
which will be cleaned afterwards
]
variable[bash_command] assign[=] name[self].bash_command
call[name[self].log.info, parameter[constant[Tmp dir root location:
%s], call[name[gettempdir], parameter[]]]]
with call[name[TemporaryDirectory], parameter[]] begin[:]
with call[name[NamedTemporaryFile], parameter[]] begin[:]
call[name[f].write, parameter[call[name[bytes], parameter[name[bash_command], constant[utf_8]]]]]
call[name[f].flush, parameter[]]
variable[fname] assign[=] name[f].name
variable[script_location] assign[=] binary_operation[binary_operation[name[tmp_dir] + constant[/]] + name[fname]]
call[name[self].log.info, parameter[constant[Temporary script location: %s], name[script_location]]]
call[name[self].log.info, parameter[constant[Running command: %s], name[bash_command]]]
variable[sp] assign[=] call[name[Popen], parameter[list[[<ast.Constant object at 0x7da2054a5f00>, <ast.Name object at 0x7da2054a44c0>]]]]
name[self].sp assign[=] name[sp]
call[name[self].log.info, parameter[constant[Output:]]]
variable[line] assign[=] constant[]
for taget[name[line]] in starred[call[name[iter], parameter[name[sp].stdout.readline, constant[b'']]]] begin[:]
variable[line] assign[=] call[call[name[line].decode, parameter[name[self].output_encoding]].strip, parameter[]]
call[name[self].log.info, parameter[name[line]]]
call[name[sp].wait, parameter[]]
call[name[self].log.info, parameter[constant[Command exited with return code %s], name[sp].returncode]]
return[<ast.UnaryOp object at 0x7da1b0594850>]
|
keyword[def] identifier[poke] ( identifier[self] , identifier[context] ):
literal[string]
identifier[bash_command] = identifier[self] . identifier[bash_command]
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[gettempdir] ())
keyword[with] identifier[TemporaryDirectory] ( identifier[prefix] = literal[string] ) keyword[as] identifier[tmp_dir] :
keyword[with] identifier[NamedTemporaryFile] ( identifier[dir] = identifier[tmp_dir] , identifier[prefix] = identifier[self] . identifier[task_id] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[bytes] ( identifier[bash_command] , literal[string] ))
identifier[f] . identifier[flush] ()
identifier[fname] = identifier[f] . identifier[name]
identifier[script_location] = identifier[tmp_dir] + literal[string] + identifier[fname]
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[script_location] )
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[bash_command] )
identifier[sp] = identifier[Popen] (
[ literal[string] , identifier[fname] ],
identifier[stdout] = identifier[PIPE] , identifier[stderr] = identifier[STDOUT] ,
identifier[close_fds] = keyword[True] , identifier[cwd] = identifier[tmp_dir] ,
identifier[env] = identifier[self] . identifier[env] , identifier[preexec_fn] = identifier[os] . identifier[setsid] )
identifier[self] . identifier[sp] = identifier[sp]
identifier[self] . identifier[log] . identifier[info] ( literal[string] )
identifier[line] = literal[string]
keyword[for] identifier[line] keyword[in] identifier[iter] ( identifier[sp] . identifier[stdout] . identifier[readline] , literal[string] ):
identifier[line] = identifier[line] . identifier[decode] ( identifier[self] . identifier[output_encoding] ). identifier[strip] ()
identifier[self] . identifier[log] . identifier[info] ( identifier[line] )
identifier[sp] . identifier[wait] ()
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[sp] . identifier[returncode] )
keyword[return] keyword[not] identifier[sp] . identifier[returncode]
|
def poke(self, context):
"""
Execute the bash command in a temporary directory
which will be cleaned afterwards
"""
bash_command = self.bash_command
self.log.info('Tmp dir root location: \n %s', gettempdir())
with TemporaryDirectory(prefix='airflowtmp') as tmp_dir:
with NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f:
f.write(bytes(bash_command, 'utf_8'))
f.flush()
fname = f.name
script_location = tmp_dir + '/' + fname
self.log.info('Temporary script location: %s', script_location)
self.log.info('Running command: %s', bash_command)
sp = Popen(['bash', fname], stdout=PIPE, stderr=STDOUT, close_fds=True, cwd=tmp_dir, env=self.env, preexec_fn=os.setsid)
self.sp = sp
self.log.info('Output:')
line = ''
for line in iter(sp.stdout.readline, b''):
line = line.decode(self.output_encoding).strip()
self.log.info(line) # depends on [control=['for'], data=['line']]
sp.wait()
self.log.info('Command exited with return code %s', sp.returncode)
return not sp.returncode # depends on [control=['with'], data=['f']] # depends on [control=['with'], data=['tmp_dir']]
|
def set_window_geometry(geometry):
"""Set window geometry.
Parameters
==========
geometry : tuple (4 integers) or None
x, y, dx, dy values employed to set the Qt backend geometry.
"""
if geometry is not None:
x_geom, y_geom, dx_geom, dy_geom = geometry
mngr = plt.get_current_fig_manager()
if 'window' in dir(mngr):
try:
mngr.window.setGeometry(x_geom, y_geom, dx_geom, dy_geom)
except AttributeError:
pass
else:
pass
|
def function[set_window_geometry, parameter[geometry]]:
constant[Set window geometry.
Parameters
==========
geometry : tuple (4 integers) or None
x, y, dx, dy values employed to set the Qt backend geometry.
]
if compare[name[geometry] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da1b24ecf70> assign[=] name[geometry]
variable[mngr] assign[=] call[name[plt].get_current_fig_manager, parameter[]]
if compare[constant[window] in call[name[dir], parameter[name[mngr]]]] begin[:]
<ast.Try object at 0x7da1b24ecc40>
|
keyword[def] identifier[set_window_geometry] ( identifier[geometry] ):
literal[string]
keyword[if] identifier[geometry] keyword[is] keyword[not] keyword[None] :
identifier[x_geom] , identifier[y_geom] , identifier[dx_geom] , identifier[dy_geom] = identifier[geometry]
identifier[mngr] = identifier[plt] . identifier[get_current_fig_manager] ()
keyword[if] literal[string] keyword[in] identifier[dir] ( identifier[mngr] ):
keyword[try] :
identifier[mngr] . identifier[window] . identifier[setGeometry] ( identifier[x_geom] , identifier[y_geom] , identifier[dx_geom] , identifier[dy_geom] )
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[else] :
keyword[pass]
|
def set_window_geometry(geometry):
"""Set window geometry.
Parameters
==========
geometry : tuple (4 integers) or None
x, y, dx, dy values employed to set the Qt backend geometry.
"""
if geometry is not None:
(x_geom, y_geom, dx_geom, dy_geom) = geometry
mngr = plt.get_current_fig_manager()
if 'window' in dir(mngr):
try:
mngr.window.setGeometry(x_geom, y_geom, dx_geom, dy_geom) # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
else:
pass # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['geometry']]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.