code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def get_help_keys(self):
"""
Returns dict of help_context keys (description texts used in `EmailRegistry.register()` method).
"""
help_keys = {}
for k, v in self.help_context.items():
if isinstance(v, tuple):
help_keys[k] = v[0]
else:
help_keys[k] = v
return help_keys | def function[get_help_keys, parameter[self]]:
constant[
Returns dict of help_context keys (description texts used in `EmailRegistry.register()` method).
]
variable[help_keys] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b1940340>, <ast.Name object at 0x7da1b1940370>]]] in starred[call[name[self].help_context.items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[v], name[tuple]]] begin[:]
call[name[help_keys]][name[k]] assign[=] call[name[v]][constant[0]]
return[name[help_keys]] | keyword[def] identifier[get_help_keys] ( identifier[self] ):
literal[string]
identifier[help_keys] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[help_context] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[v] , identifier[tuple] ):
identifier[help_keys] [ identifier[k] ]= identifier[v] [ literal[int] ]
keyword[else] :
identifier[help_keys] [ identifier[k] ]= identifier[v]
keyword[return] identifier[help_keys] | def get_help_keys(self):
"""
Returns dict of help_context keys (description texts used in `EmailRegistry.register()` method).
"""
help_keys = {}
for (k, v) in self.help_context.items():
if isinstance(v, tuple):
help_keys[k] = v[0] # depends on [control=['if'], data=[]]
else:
help_keys[k] = v # depends on [control=['for'], data=[]]
return help_keys |
def derive_signature(key, qs):
"""Derives the signature from the supplied query string using the key."""
key, qs = (key or "", qs or "")
return hmac.new(key.encode(), qs.encode(), hashlib.sha1).hexdigest() | def function[derive_signature, parameter[key, qs]]:
constant[Derives the signature from the supplied query string using the key.]
<ast.Tuple object at 0x7da18c4cd660> assign[=] tuple[[<ast.BoolOp object at 0x7da18c4cf730>, <ast.BoolOp object at 0x7da18c4cd6f0>]]
return[call[call[name[hmac].new, parameter[call[name[key].encode, parameter[]], call[name[qs].encode, parameter[]], name[hashlib].sha1]].hexdigest, parameter[]]] | keyword[def] identifier[derive_signature] ( identifier[key] , identifier[qs] ):
literal[string]
identifier[key] , identifier[qs] =( identifier[key] keyword[or] literal[string] , identifier[qs] keyword[or] literal[string] )
keyword[return] identifier[hmac] . identifier[new] ( identifier[key] . identifier[encode] (), identifier[qs] . identifier[encode] (), identifier[hashlib] . identifier[sha1] ). identifier[hexdigest] () | def derive_signature(key, qs):
"""Derives the signature from the supplied query string using the key."""
(key, qs) = (key or '', qs or '')
return hmac.new(key.encode(), qs.encode(), hashlib.sha1).hexdigest() |
def readlines(self, encoding=None):
"""Reads from the file and returns result as a list of lines."""
try:
encoding = encoding or ENCODING
with codecs.open(self.path, encoding=None) as fi:
return fi.readlines()
except:
return [] | def function[readlines, parameter[self, encoding]]:
constant[Reads from the file and returns result as a list of lines.]
<ast.Try object at 0x7da18bcc9ae0> | keyword[def] identifier[readlines] ( identifier[self] , identifier[encoding] = keyword[None] ):
literal[string]
keyword[try] :
identifier[encoding] = identifier[encoding] keyword[or] identifier[ENCODING]
keyword[with] identifier[codecs] . identifier[open] ( identifier[self] . identifier[path] , identifier[encoding] = keyword[None] ) keyword[as] identifier[fi] :
keyword[return] identifier[fi] . identifier[readlines] ()
keyword[except] :
keyword[return] [] | def readlines(self, encoding=None):
"""Reads from the file and returns result as a list of lines."""
try:
encoding = encoding or ENCODING
with codecs.open(self.path, encoding=None) as fi:
return fi.readlines() # depends on [control=['with'], data=['fi']] # depends on [control=['try'], data=[]]
except:
return [] # depends on [control=['except'], data=[]] |
def _entry_must_exist(df, k1, k2):
"""Evaluate key-subkey existence.
Checks that the key-subkey combo exists in the
configuration options.
"""
count = df[(df['k1'] == k1) &
(df['k2'] == k2)].shape[0]
if count == 0:
raise NotRegisteredError(
"Option {0}.{1} not registered".format(k1, k2)) | def function[_entry_must_exist, parameter[df, k1, k2]]:
constant[Evaluate key-subkey existence.
Checks that the key-subkey combo exists in the
configuration options.
]
variable[count] assign[=] call[call[name[df]][binary_operation[compare[call[name[df]][constant[k1]] equal[==] name[k1]] <ast.BitAnd object at 0x7da2590d6b60> compare[call[name[df]][constant[k2]] equal[==] name[k2]]]].shape][constant[0]]
if compare[name[count] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da1b1e206a0> | keyword[def] identifier[_entry_must_exist] ( identifier[df] , identifier[k1] , identifier[k2] ):
literal[string]
identifier[count] = identifier[df] [( identifier[df] [ literal[string] ]== identifier[k1] )&
( identifier[df] [ literal[string] ]== identifier[k2] )]. identifier[shape] [ literal[int] ]
keyword[if] identifier[count] == literal[int] :
keyword[raise] identifier[NotRegisteredError] (
literal[string] . identifier[format] ( identifier[k1] , identifier[k2] )) | def _entry_must_exist(df, k1, k2):
"""Evaluate key-subkey existence.
Checks that the key-subkey combo exists in the
configuration options.
"""
count = df[(df['k1'] == k1) & (df['k2'] == k2)].shape[0]
if count == 0:
raise NotRegisteredError('Option {0}.{1} not registered'.format(k1, k2)) # depends on [control=['if'], data=[]] |
def attackers(self, color: Color, square: Square) -> "SquareSet":
"""
Gets a set of attackers of the given color for the given square.
Pinned pieces still count as attackers.
Returns a :class:`set of squares <chess.SquareSet>`.
"""
return SquareSet(self.attackers_mask(color, square)) | def function[attackers, parameter[self, color, square]]:
constant[
Gets a set of attackers of the given color for the given square.
Pinned pieces still count as attackers.
Returns a :class:`set of squares <chess.SquareSet>`.
]
return[call[name[SquareSet], parameter[call[name[self].attackers_mask, parameter[name[color], name[square]]]]]] | keyword[def] identifier[attackers] ( identifier[self] , identifier[color] : identifier[Color] , identifier[square] : identifier[Square] )-> literal[string] :
literal[string]
keyword[return] identifier[SquareSet] ( identifier[self] . identifier[attackers_mask] ( identifier[color] , identifier[square] )) | def attackers(self, color: Color, square: Square) -> 'SquareSet':
"""
Gets a set of attackers of the given color for the given square.
Pinned pieces still count as attackers.
Returns a :class:`set of squares <chess.SquareSet>`.
"""
return SquareSet(self.attackers_mask(color, square)) |
def save_datasets(self, datasets, filename=None, **kwargs):
"""Save all datasets to one or more files."""
logger.info('Saving datasets to NetCDF4/CF.')
# XXX: Should we combine the info of all datasets?
filename = filename or self.get_filename(**datasets[0].attrs)
datas, start_times, end_times = self._collect_datasets(datasets, kwargs)
dataset = xr.Dataset(datas)
try:
dataset['time_bnds'] = make_time_bounds(dataset,
start_times,
end_times)
dataset['time'].attrs['bounds'] = "time_bnds"
except KeyError:
logger.warning('No time dimension in datasets, skipping time bounds creation.')
header_attrs = kwargs.pop('header_attrs', None)
if header_attrs is not None:
dataset.attrs.update({k: v for k, v in header_attrs.items() if v})
dataset.attrs['history'] = ("Created by pytroll/satpy on " +
str(datetime.utcnow()))
dataset.attrs['conventions'] = 'CF-1.7'
engine = kwargs.pop("engine", 'h5netcdf')
for key in list(kwargs.keys()):
if key not in ['mode', 'format', 'group', 'encoding', 'unlimited_dims', 'compute']:
kwargs.pop(key, None)
return dataset.to_netcdf(filename, engine=engine, **kwargs) | def function[save_datasets, parameter[self, datasets, filename]]:
constant[Save all datasets to one or more files.]
call[name[logger].info, parameter[constant[Saving datasets to NetCDF4/CF.]]]
variable[filename] assign[=] <ast.BoolOp object at 0x7da1b22bb370>
<ast.Tuple object at 0x7da1b22ba5c0> assign[=] call[name[self]._collect_datasets, parameter[name[datasets], name[kwargs]]]
variable[dataset] assign[=] call[name[xr].Dataset, parameter[name[datas]]]
<ast.Try object at 0x7da1b22ba9b0>
variable[header_attrs] assign[=] call[name[kwargs].pop, parameter[constant[header_attrs], constant[None]]]
if compare[name[header_attrs] is_not constant[None]] begin[:]
call[name[dataset].attrs.update, parameter[<ast.DictComp object at 0x7da1b22f9750>]]
call[name[dataset].attrs][constant[history]] assign[=] binary_operation[constant[Created by pytroll/satpy on ] + call[name[str], parameter[call[name[datetime].utcnow, parameter[]]]]]
call[name[dataset].attrs][constant[conventions]] assign[=] constant[CF-1.7]
variable[engine] assign[=] call[name[kwargs].pop, parameter[constant[engine], constant[h5netcdf]]]
for taget[name[key]] in starred[call[name[list], parameter[call[name[kwargs].keys, parameter[]]]]] begin[:]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b22f8d60>, <ast.Constant object at 0x7da1b22f8d30>, <ast.Constant object at 0x7da1b22f8df0>, <ast.Constant object at 0x7da1b22f8d00>, <ast.Constant object at 0x7da1b22f8c70>, <ast.Constant object at 0x7da1b22f8c40>]]] begin[:]
call[name[kwargs].pop, parameter[name[key], constant[None]]]
return[call[name[dataset].to_netcdf, parameter[name[filename]]]] | keyword[def] identifier[save_datasets] ( identifier[self] , identifier[datasets] , identifier[filename] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[logger] . identifier[info] ( literal[string] )
identifier[filename] = identifier[filename] keyword[or] identifier[self] . identifier[get_filename] (** identifier[datasets] [ literal[int] ]. identifier[attrs] )
identifier[datas] , identifier[start_times] , identifier[end_times] = identifier[self] . identifier[_collect_datasets] ( identifier[datasets] , identifier[kwargs] )
identifier[dataset] = identifier[xr] . identifier[Dataset] ( identifier[datas] )
keyword[try] :
identifier[dataset] [ literal[string] ]= identifier[make_time_bounds] ( identifier[dataset] ,
identifier[start_times] ,
identifier[end_times] )
identifier[dataset] [ literal[string] ]. identifier[attrs] [ literal[string] ]= literal[string]
keyword[except] identifier[KeyError] :
identifier[logger] . identifier[warning] ( literal[string] )
identifier[header_attrs] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[header_attrs] keyword[is] keyword[not] keyword[None] :
identifier[dataset] . identifier[attrs] . identifier[update] ({ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[header_attrs] . identifier[items] () keyword[if] identifier[v] })
identifier[dataset] . identifier[attrs] [ literal[string] ]=( literal[string] +
identifier[str] ( identifier[datetime] . identifier[utcnow] ()))
identifier[dataset] . identifier[attrs] [ literal[string] ]= literal[string]
identifier[engine] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] )
keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[kwargs] . identifier[keys] ()):
keyword[if] identifier[key] keyword[not] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[kwargs] . identifier[pop] ( identifier[key] , keyword[None] )
keyword[return] identifier[dataset] . identifier[to_netcdf] ( identifier[filename] , identifier[engine] = identifier[engine] ,** identifier[kwargs] ) | def save_datasets(self, datasets, filename=None, **kwargs):
"""Save all datasets to one or more files."""
logger.info('Saving datasets to NetCDF4/CF.')
# XXX: Should we combine the info of all datasets?
filename = filename or self.get_filename(**datasets[0].attrs)
(datas, start_times, end_times) = self._collect_datasets(datasets, kwargs)
dataset = xr.Dataset(datas)
try:
dataset['time_bnds'] = make_time_bounds(dataset, start_times, end_times)
dataset['time'].attrs['bounds'] = 'time_bnds' # depends on [control=['try'], data=[]]
except KeyError:
logger.warning('No time dimension in datasets, skipping time bounds creation.') # depends on [control=['except'], data=[]]
header_attrs = kwargs.pop('header_attrs', None)
if header_attrs is not None:
dataset.attrs.update({k: v for (k, v) in header_attrs.items() if v}) # depends on [control=['if'], data=['header_attrs']]
dataset.attrs['history'] = 'Created by pytroll/satpy on ' + str(datetime.utcnow())
dataset.attrs['conventions'] = 'CF-1.7'
engine = kwargs.pop('engine', 'h5netcdf')
for key in list(kwargs.keys()):
if key not in ['mode', 'format', 'group', 'encoding', 'unlimited_dims', 'compute']:
kwargs.pop(key, None) # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']]
return dataset.to_netcdf(filename, engine=engine, **kwargs) |
def update(self):
"""Update load stats."""
# Init new stats
stats = self.get_init_value()
if self.input_method == 'local':
# Update stats using the standard system lib
# Get the load using the os standard lib
load = self._getloadavg()
if load is None:
stats = self.get_init_value()
else:
stats = {'min1': load[0],
'min5': load[1],
'min15': load[2],
'cpucore': self.nb_log_core}
elif self.input_method == 'snmp':
# Update stats using SNMP
stats = self.get_stats_snmp(snmp_oid=snmp_oid)
if stats['min1'] == '':
stats = self.get_init_value()
return stats
# Python 3 return a dict like:
# {'min1': "b'0.08'", 'min5': "b'0.12'", 'min15': "b'0.15'"}
for k, v in iteritems(stats):
stats[k] = float(v)
stats['cpucore'] = self.nb_log_core
# Update the stats
self.stats = stats
return self.stats | def function[update, parameter[self]]:
constant[Update load stats.]
variable[stats] assign[=] call[name[self].get_init_value, parameter[]]
if compare[name[self].input_method equal[==] constant[local]] begin[:]
variable[load] assign[=] call[name[self]._getloadavg, parameter[]]
if compare[name[load] is constant[None]] begin[:]
variable[stats] assign[=] call[name[self].get_init_value, parameter[]]
name[self].stats assign[=] name[stats]
return[name[self].stats] | keyword[def] identifier[update] ( identifier[self] ):
literal[string]
identifier[stats] = identifier[self] . identifier[get_init_value] ()
keyword[if] identifier[self] . identifier[input_method] == literal[string] :
identifier[load] = identifier[self] . identifier[_getloadavg] ()
keyword[if] identifier[load] keyword[is] keyword[None] :
identifier[stats] = identifier[self] . identifier[get_init_value] ()
keyword[else] :
identifier[stats] ={ literal[string] : identifier[load] [ literal[int] ],
literal[string] : identifier[load] [ literal[int] ],
literal[string] : identifier[load] [ literal[int] ],
literal[string] : identifier[self] . identifier[nb_log_core] }
keyword[elif] identifier[self] . identifier[input_method] == literal[string] :
identifier[stats] = identifier[self] . identifier[get_stats_snmp] ( identifier[snmp_oid] = identifier[snmp_oid] )
keyword[if] identifier[stats] [ literal[string] ]== literal[string] :
identifier[stats] = identifier[self] . identifier[get_init_value] ()
keyword[return] identifier[stats]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[stats] ):
identifier[stats] [ identifier[k] ]= identifier[float] ( identifier[v] )
identifier[stats] [ literal[string] ]= identifier[self] . identifier[nb_log_core]
identifier[self] . identifier[stats] = identifier[stats]
keyword[return] identifier[self] . identifier[stats] | def update(self):
"""Update load stats."""
# Init new stats
stats = self.get_init_value()
if self.input_method == 'local':
# Update stats using the standard system lib
# Get the load using the os standard lib
load = self._getloadavg()
if load is None:
stats = self.get_init_value() # depends on [control=['if'], data=[]]
else:
stats = {'min1': load[0], 'min5': load[1], 'min15': load[2], 'cpucore': self.nb_log_core} # depends on [control=['if'], data=[]]
elif self.input_method == 'snmp':
# Update stats using SNMP
stats = self.get_stats_snmp(snmp_oid=snmp_oid)
if stats['min1'] == '':
stats = self.get_init_value()
return stats # depends on [control=['if'], data=[]]
# Python 3 return a dict like:
# {'min1': "b'0.08'", 'min5': "b'0.12'", 'min15': "b'0.15'"}
for (k, v) in iteritems(stats):
stats[k] = float(v) # depends on [control=['for'], data=[]]
stats['cpucore'] = self.nb_log_core # depends on [control=['if'], data=[]]
# Update the stats
self.stats = stats
return self.stats |
def start(self):
""" start the channel listener and start consuming messages """
self.connections(True)
binding_keys = sys.argv[1:]
if not binding_keys:
print(sys.stderr,
'Usage: {0!s} [binding_key]...'.format(sys.argv[0]))
sys.exit(0)
for binding_key in binding_keys:
self.channel.queue_bind(exchange='topic_recs',
queue=self.queue_name,
routing_key=binding_key) | def function[start, parameter[self]]:
constant[ start the channel listener and start consuming messages ]
call[name[self].connections, parameter[constant[True]]]
variable[binding_keys] assign[=] call[name[sys].argv][<ast.Slice object at 0x7da18f720460>]
if <ast.UnaryOp object at 0x7da207f01de0> begin[:]
call[name[print], parameter[name[sys].stderr, call[constant[Usage: {0!s} [binding_key]...].format, parameter[call[name[sys].argv][constant[0]]]]]]
call[name[sys].exit, parameter[constant[0]]]
for taget[name[binding_key]] in starred[name[binding_keys]] begin[:]
call[name[self].channel.queue_bind, parameter[]] | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
identifier[self] . identifier[connections] ( keyword[True] )
identifier[binding_keys] = identifier[sys] . identifier[argv] [ literal[int] :]
keyword[if] keyword[not] identifier[binding_keys] :
identifier[print] ( identifier[sys] . identifier[stderr] ,
literal[string] . identifier[format] ( identifier[sys] . identifier[argv] [ literal[int] ]))
identifier[sys] . identifier[exit] ( literal[int] )
keyword[for] identifier[binding_key] keyword[in] identifier[binding_keys] :
identifier[self] . identifier[channel] . identifier[queue_bind] ( identifier[exchange] = literal[string] ,
identifier[queue] = identifier[self] . identifier[queue_name] ,
identifier[routing_key] = identifier[binding_key] ) | def start(self):
""" start the channel listener and start consuming messages """
self.connections(True)
binding_keys = sys.argv[1:]
if not binding_keys:
print(sys.stderr, 'Usage: {0!s} [binding_key]...'.format(sys.argv[0]))
sys.exit(0) # depends on [control=['if'], data=[]]
for binding_key in binding_keys:
self.channel.queue_bind(exchange='topic_recs', queue=self.queue_name, routing_key=binding_key) # depends on [control=['for'], data=['binding_key']] |
def wholeTextFiles(self, path, minPartitions=None, use_unicode=True):
"""
Read a directory of text files from HDFS, a local file system
(available on all nodes), or any Hadoop-supported file system
URI. Each file is read as a single record and returned in a
key-value pair, where the key is the path of each file, the
value is the content of each file.
The text files must be encoded as UTF-8.
If use_unicode is False, the strings will be kept as `str` (encoding
as `utf-8`), which is faster and smaller than unicode. (Added in
Spark 1.2)
For example, if you have the following files::
hdfs://a-hdfs-path/part-00000
hdfs://a-hdfs-path/part-00001
...
hdfs://a-hdfs-path/part-nnnnn
Do C{rdd = sparkContext.wholeTextFiles("hdfs://a-hdfs-path")},
then C{rdd} contains::
(a-hdfs-path/part-00000, its content)
(a-hdfs-path/part-00001, its content)
...
(a-hdfs-path/part-nnnnn, its content)
.. note:: Small files are preferred, as each file will be loaded
fully in memory.
>>> dirPath = os.path.join(tempdir, "files")
>>> os.mkdir(dirPath)
>>> with open(os.path.join(dirPath, "1.txt"), "w") as file1:
... _ = file1.write("1")
>>> with open(os.path.join(dirPath, "2.txt"), "w") as file2:
... _ = file2.write("2")
>>> textFiles = sc.wholeTextFiles(dirPath)
>>> sorted(textFiles.collect())
[(u'.../1.txt', u'1'), (u'.../2.txt', u'2')]
"""
minPartitions = minPartitions or self.defaultMinPartitions
return RDD(self._jsc.wholeTextFiles(path, minPartitions), self,
PairDeserializer(UTF8Deserializer(use_unicode), UTF8Deserializer(use_unicode))) | def function[wholeTextFiles, parameter[self, path, minPartitions, use_unicode]]:
constant[
Read a directory of text files from HDFS, a local file system
(available on all nodes), or any Hadoop-supported file system
URI. Each file is read as a single record and returned in a
key-value pair, where the key is the path of each file, the
value is the content of each file.
The text files must be encoded as UTF-8.
If use_unicode is False, the strings will be kept as `str` (encoding
as `utf-8`), which is faster and smaller than unicode. (Added in
Spark 1.2)
For example, if you have the following files::
hdfs://a-hdfs-path/part-00000
hdfs://a-hdfs-path/part-00001
...
hdfs://a-hdfs-path/part-nnnnn
Do C{rdd = sparkContext.wholeTextFiles("hdfs://a-hdfs-path")},
then C{rdd} contains::
(a-hdfs-path/part-00000, its content)
(a-hdfs-path/part-00001, its content)
...
(a-hdfs-path/part-nnnnn, its content)
.. note:: Small files are preferred, as each file will be loaded
fully in memory.
>>> dirPath = os.path.join(tempdir, "files")
>>> os.mkdir(dirPath)
>>> with open(os.path.join(dirPath, "1.txt"), "w") as file1:
... _ = file1.write("1")
>>> with open(os.path.join(dirPath, "2.txt"), "w") as file2:
... _ = file2.write("2")
>>> textFiles = sc.wholeTextFiles(dirPath)
>>> sorted(textFiles.collect())
[(u'.../1.txt', u'1'), (u'.../2.txt', u'2')]
]
variable[minPartitions] assign[=] <ast.BoolOp object at 0x7da1b1d57430>
return[call[name[RDD], parameter[call[name[self]._jsc.wholeTextFiles, parameter[name[path], name[minPartitions]]], name[self], call[name[PairDeserializer], parameter[call[name[UTF8Deserializer], parameter[name[use_unicode]]], call[name[UTF8Deserializer], parameter[name[use_unicode]]]]]]]] | keyword[def] identifier[wholeTextFiles] ( identifier[self] , identifier[path] , identifier[minPartitions] = keyword[None] , identifier[use_unicode] = keyword[True] ):
literal[string]
identifier[minPartitions] = identifier[minPartitions] keyword[or] identifier[self] . identifier[defaultMinPartitions]
keyword[return] identifier[RDD] ( identifier[self] . identifier[_jsc] . identifier[wholeTextFiles] ( identifier[path] , identifier[minPartitions] ), identifier[self] ,
identifier[PairDeserializer] ( identifier[UTF8Deserializer] ( identifier[use_unicode] ), identifier[UTF8Deserializer] ( identifier[use_unicode] ))) | def wholeTextFiles(self, path, minPartitions=None, use_unicode=True):
"""
Read a directory of text files from HDFS, a local file system
(available on all nodes), or any Hadoop-supported file system
URI. Each file is read as a single record and returned in a
key-value pair, where the key is the path of each file, the
value is the content of each file.
The text files must be encoded as UTF-8.
If use_unicode is False, the strings will be kept as `str` (encoding
as `utf-8`), which is faster and smaller than unicode. (Added in
Spark 1.2)
For example, if you have the following files::
hdfs://a-hdfs-path/part-00000
hdfs://a-hdfs-path/part-00001
...
hdfs://a-hdfs-path/part-nnnnn
Do C{rdd = sparkContext.wholeTextFiles("hdfs://a-hdfs-path")},
then C{rdd} contains::
(a-hdfs-path/part-00000, its content)
(a-hdfs-path/part-00001, its content)
...
(a-hdfs-path/part-nnnnn, its content)
.. note:: Small files are preferred, as each file will be loaded
fully in memory.
>>> dirPath = os.path.join(tempdir, "files")
>>> os.mkdir(dirPath)
>>> with open(os.path.join(dirPath, "1.txt"), "w") as file1:
... _ = file1.write("1")
>>> with open(os.path.join(dirPath, "2.txt"), "w") as file2:
... _ = file2.write("2")
>>> textFiles = sc.wholeTextFiles(dirPath)
>>> sorted(textFiles.collect())
[(u'.../1.txt', u'1'), (u'.../2.txt', u'2')]
"""
minPartitions = minPartitions or self.defaultMinPartitions
return RDD(self._jsc.wholeTextFiles(path, minPartitions), self, PairDeserializer(UTF8Deserializer(use_unicode), UTF8Deserializer(use_unicode))) |
def get_checksum(self):
"""
Returns a checksum based on the IDL that ignores comments and
ordering, but detects changes to types, parameter order,
and enum values.
"""
arr = [ ]
for elem in self.parsed:
s = elem_checksum(elem)
if s:
arr.append(s)
arr.sort()
#print arr
return md5(json.dumps(arr)) | def function[get_checksum, parameter[self]]:
constant[
Returns a checksum based on the IDL that ignores comments and
ordering, but detects changes to types, parameter order,
and enum values.
]
variable[arr] assign[=] list[[]]
for taget[name[elem]] in starred[name[self].parsed] begin[:]
variable[s] assign[=] call[name[elem_checksum], parameter[name[elem]]]
if name[s] begin[:]
call[name[arr].append, parameter[name[s]]]
call[name[arr].sort, parameter[]]
return[call[name[md5], parameter[call[name[json].dumps, parameter[name[arr]]]]]] | keyword[def] identifier[get_checksum] ( identifier[self] ):
literal[string]
identifier[arr] =[]
keyword[for] identifier[elem] keyword[in] identifier[self] . identifier[parsed] :
identifier[s] = identifier[elem_checksum] ( identifier[elem] )
keyword[if] identifier[s] :
identifier[arr] . identifier[append] ( identifier[s] )
identifier[arr] . identifier[sort] ()
keyword[return] identifier[md5] ( identifier[json] . identifier[dumps] ( identifier[arr] )) | def get_checksum(self):
"""
Returns a checksum based on the IDL that ignores comments and
ordering, but detects changes to types, parameter order,
and enum values.
"""
arr = []
for elem in self.parsed:
s = elem_checksum(elem)
if s:
arr.append(s) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['elem']]
arr.sort()
#print arr
return md5(json.dumps(arr)) |
def end(self):
"""
Terminate (and then kill) the manager process launched.
:return:
"""
if not self._process:
self.log.warn('Ending without manager process.')
return
this_process = psutil.Process(os.getpid())
try:
manager_process = psutil.Process(self._process.pid)
except psutil.NoSuchProcess:
self.log.info("Manager process not running.")
return
# First try SIGTERM
if manager_process.is_running() \
and manager_process.pid in [x.pid for x in this_process.children()]:
self.log.info("Terminating manager process: %s", manager_process.pid)
manager_process.terminate()
# TODO: Remove magic number
timeout = 5
self.log.info("Waiting up to %ss for manager process to exit...", timeout)
try:
psutil.wait_procs({manager_process}, timeout)
except psutil.TimeoutExpired:
self.log.debug("Ran out of time while waiting for "
"processes to exit")
# Then SIGKILL
if manager_process.is_running() \
and manager_process.pid in [x.pid for x in this_process.children()]:
self.log.info("Killing manager process: %s", manager_process.pid)
manager_process.kill()
manager_process.wait() | def function[end, parameter[self]]:
constant[
Terminate (and then kill) the manager process launched.
:return:
]
if <ast.UnaryOp object at 0x7da1b055ba00> begin[:]
call[name[self].log.warn, parameter[constant[Ending without manager process.]]]
return[None]
variable[this_process] assign[=] call[name[psutil].Process, parameter[call[name[os].getpid, parameter[]]]]
<ast.Try object at 0x7da1b0558940>
if <ast.BoolOp object at 0x7da1b052a7d0> begin[:]
call[name[self].log.info, parameter[constant[Terminating manager process: %s], name[manager_process].pid]]
call[name[manager_process].terminate, parameter[]]
variable[timeout] assign[=] constant[5]
call[name[self].log.info, parameter[constant[Waiting up to %ss for manager process to exit...], name[timeout]]]
<ast.Try object at 0x7da20c6c45b0>
if <ast.BoolOp object at 0x7da20c6c5570> begin[:]
call[name[self].log.info, parameter[constant[Killing manager process: %s], name[manager_process].pid]]
call[name[manager_process].kill, parameter[]]
call[name[manager_process].wait, parameter[]] | keyword[def] identifier[end] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_process] :
identifier[self] . identifier[log] . identifier[warn] ( literal[string] )
keyword[return]
identifier[this_process] = identifier[psutil] . identifier[Process] ( identifier[os] . identifier[getpid] ())
keyword[try] :
identifier[manager_process] = identifier[psutil] . identifier[Process] ( identifier[self] . identifier[_process] . identifier[pid] )
keyword[except] identifier[psutil] . identifier[NoSuchProcess] :
identifier[self] . identifier[log] . identifier[info] ( literal[string] )
keyword[return]
keyword[if] identifier[manager_process] . identifier[is_running] () keyword[and] identifier[manager_process] . identifier[pid] keyword[in] [ identifier[x] . identifier[pid] keyword[for] identifier[x] keyword[in] identifier[this_process] . identifier[children] ()]:
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[manager_process] . identifier[pid] )
identifier[manager_process] . identifier[terminate] ()
identifier[timeout] = literal[int]
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[timeout] )
keyword[try] :
identifier[psutil] . identifier[wait_procs] ({ identifier[manager_process] }, identifier[timeout] )
keyword[except] identifier[psutil] . identifier[TimeoutExpired] :
identifier[self] . identifier[log] . identifier[debug] ( literal[string]
literal[string] )
keyword[if] identifier[manager_process] . identifier[is_running] () keyword[and] identifier[manager_process] . identifier[pid] keyword[in] [ identifier[x] . identifier[pid] keyword[for] identifier[x] keyword[in] identifier[this_process] . identifier[children] ()]:
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[manager_process] . identifier[pid] )
identifier[manager_process] . identifier[kill] ()
identifier[manager_process] . identifier[wait] () | def end(self):
"""
Terminate (and then kill) the manager process launched.
:return:
"""
if not self._process:
self.log.warn('Ending without manager process.')
return # depends on [control=['if'], data=[]]
this_process = psutil.Process(os.getpid())
try:
manager_process = psutil.Process(self._process.pid) # depends on [control=['try'], data=[]]
except psutil.NoSuchProcess:
self.log.info('Manager process not running.')
return # depends on [control=['except'], data=[]]
# First try SIGTERM
if manager_process.is_running() and manager_process.pid in [x.pid for x in this_process.children()]:
self.log.info('Terminating manager process: %s', manager_process.pid)
manager_process.terminate()
# TODO: Remove magic number
timeout = 5
self.log.info('Waiting up to %ss for manager process to exit...', timeout)
try:
psutil.wait_procs({manager_process}, timeout) # depends on [control=['try'], data=[]]
except psutil.TimeoutExpired:
self.log.debug('Ran out of time while waiting for processes to exit') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# Then SIGKILL
if manager_process.is_running() and manager_process.pid in [x.pid for x in this_process.children()]:
self.log.info('Killing manager process: %s', manager_process.pid)
manager_process.kill()
manager_process.wait() # depends on [control=['if'], data=[]] |
def delete(self, *args, **kwargs):
""" custom delete method to update counts """
super(UpdateCountsMixin, self).delete(*args, **kwargs)
self.update_count() | def function[delete, parameter[self]]:
constant[ custom delete method to update counts ]
call[call[name[super], parameter[name[UpdateCountsMixin], name[self]]].delete, parameter[<ast.Starred object at 0x7da18c4cd630>]]
call[name[self].update_count, parameter[]] | keyword[def] identifier[delete] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[super] ( identifier[UpdateCountsMixin] , identifier[self] ). identifier[delete] (* identifier[args] ,** identifier[kwargs] )
identifier[self] . identifier[update_count] () | def delete(self, *args, **kwargs):
""" custom delete method to update counts """
super(UpdateCountsMixin, self).delete(*args, **kwargs)
self.update_count() |
def rule_command_cmdlist_interface_e_interface_te_leaf_interface_tengigabitethernet_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_e = ET.SubElement(cmdlist, "interface-e")
interface_te_leaf = ET.SubElement(interface_e, "interface-te-leaf")
interface = ET.SubElement(interface_te_leaf, "interface")
tengigabitethernet_leaf = ET.SubElement(interface, "tengigabitethernet-leaf")
tengigabitethernet_leaf.text = kwargs.pop('tengigabitethernet_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[rule_command_cmdlist_interface_e_interface_te_leaf_interface_tengigabitethernet_leaf, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[rule] assign[=] call[name[ET].SubElement, parameter[name[config], constant[rule]]]
variable[index_key] assign[=] call[name[ET].SubElement, parameter[name[rule], constant[index]]]
name[index_key].text assign[=] call[name[kwargs].pop, parameter[constant[index]]]
variable[command] assign[=] call[name[ET].SubElement, parameter[name[rule], constant[command]]]
variable[cmdlist] assign[=] call[name[ET].SubElement, parameter[name[command], constant[cmdlist]]]
variable[interface_e] assign[=] call[name[ET].SubElement, parameter[name[cmdlist], constant[interface-e]]]
variable[interface_te_leaf] assign[=] call[name[ET].SubElement, parameter[name[interface_e], constant[interface-te-leaf]]]
variable[interface] assign[=] call[name[ET].SubElement, parameter[name[interface_te_leaf], constant[interface]]]
variable[tengigabitethernet_leaf] assign[=] call[name[ET].SubElement, parameter[name[interface], constant[tengigabitethernet-leaf]]]
name[tengigabitethernet_leaf].text assign[=] call[name[kwargs].pop, parameter[constant[tengigabitethernet_leaf]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[rule_command_cmdlist_interface_e_interface_te_leaf_interface_tengigabitethernet_leaf] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[rule] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[index_key] = identifier[ET] . identifier[SubElement] ( identifier[rule] , literal[string] )
identifier[index_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[command] = identifier[ET] . identifier[SubElement] ( identifier[rule] , literal[string] )
identifier[cmdlist] = identifier[ET] . identifier[SubElement] ( identifier[command] , literal[string] )
identifier[interface_e] = identifier[ET] . identifier[SubElement] ( identifier[cmdlist] , literal[string] )
identifier[interface_te_leaf] = identifier[ET] . identifier[SubElement] ( identifier[interface_e] , literal[string] )
identifier[interface] = identifier[ET] . identifier[SubElement] ( identifier[interface_te_leaf] , literal[string] )
identifier[tengigabitethernet_leaf] = identifier[ET] . identifier[SubElement] ( identifier[interface] , literal[string] )
identifier[tengigabitethernet_leaf] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def rule_command_cmdlist_interface_e_interface_te_leaf_interface_tengigabitethernet_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
rule = ET.SubElement(config, 'rule', xmlns='urn:brocade.com:mgmt:brocade-aaa')
index_key = ET.SubElement(rule, 'index')
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, 'command')
cmdlist = ET.SubElement(command, 'cmdlist')
interface_e = ET.SubElement(cmdlist, 'interface-e')
interface_te_leaf = ET.SubElement(interface_e, 'interface-te-leaf')
interface = ET.SubElement(interface_te_leaf, 'interface')
tengigabitethernet_leaf = ET.SubElement(interface, 'tengigabitethernet-leaf')
tengigabitethernet_leaf.text = kwargs.pop('tengigabitethernet_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def serialize(obj):
"""Convert objects into JSON structures."""
# Record class and module information for deserialization
result = {'__class__': obj.__class__.__name__}
try:
result['__module__'] = obj.__module__
except AttributeError:
pass
# Convert objects to dictionary representation based on type
if isinstance(obj, datetime.datetime):
result['year'] = obj.year
result['month'] = obj.month
result['day'] = obj.day
result['hour'] = obj.hour
result['minute'] = obj.minute
result['second'] = obj.second
result['microsecond'] = obj.microsecond
return result
if isinstance(obj, StreamingBody):
result['body'] = obj.read()
obj._raw_stream = StringIO(result['body'])
obj._amount_read = 0
return result
# Raise a TypeError if the object isn't recognized
raise TypeError("Type not serializable") | def function[serialize, parameter[obj]]:
constant[Convert objects into JSON structures.]
variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da1b1a23a60>], [<ast.Attribute object at 0x7da1b1a22c80>]]
<ast.Try object at 0x7da1b1a20070>
if call[name[isinstance], parameter[name[obj], name[datetime].datetime]] begin[:]
call[name[result]][constant[year]] assign[=] name[obj].year
call[name[result]][constant[month]] assign[=] name[obj].month
call[name[result]][constant[day]] assign[=] name[obj].day
call[name[result]][constant[hour]] assign[=] name[obj].hour
call[name[result]][constant[minute]] assign[=] name[obj].minute
call[name[result]][constant[second]] assign[=] name[obj].second
call[name[result]][constant[microsecond]] assign[=] name[obj].microsecond
return[name[result]]
if call[name[isinstance], parameter[name[obj], name[StreamingBody]]] begin[:]
call[name[result]][constant[body]] assign[=] call[name[obj].read, parameter[]]
name[obj]._raw_stream assign[=] call[name[StringIO], parameter[call[name[result]][constant[body]]]]
name[obj]._amount_read assign[=] constant[0]
return[name[result]]
<ast.Raise object at 0x7da1b1a206d0> | keyword[def] identifier[serialize] ( identifier[obj] ):
literal[string]
identifier[result] ={ literal[string] : identifier[obj] . identifier[__class__] . identifier[__name__] }
keyword[try] :
identifier[result] [ literal[string] ]= identifier[obj] . identifier[__module__]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[datetime] . identifier[datetime] ):
identifier[result] [ literal[string] ]= identifier[obj] . identifier[year]
identifier[result] [ literal[string] ]= identifier[obj] . identifier[month]
identifier[result] [ literal[string] ]= identifier[obj] . identifier[day]
identifier[result] [ literal[string] ]= identifier[obj] . identifier[hour]
identifier[result] [ literal[string] ]= identifier[obj] . identifier[minute]
identifier[result] [ literal[string] ]= identifier[obj] . identifier[second]
identifier[result] [ literal[string] ]= identifier[obj] . identifier[microsecond]
keyword[return] identifier[result]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[StreamingBody] ):
identifier[result] [ literal[string] ]= identifier[obj] . identifier[read] ()
identifier[obj] . identifier[_raw_stream] = identifier[StringIO] ( identifier[result] [ literal[string] ])
identifier[obj] . identifier[_amount_read] = literal[int]
keyword[return] identifier[result]
keyword[raise] identifier[TypeError] ( literal[string] ) | def serialize(obj):
"""Convert objects into JSON structures."""
# Record class and module information for deserialization
result = {'__class__': obj.__class__.__name__}
try:
result['__module__'] = obj.__module__ # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
# Convert objects to dictionary representation based on type
if isinstance(obj, datetime.datetime):
result['year'] = obj.year
result['month'] = obj.month
result['day'] = obj.day
result['hour'] = obj.hour
result['minute'] = obj.minute
result['second'] = obj.second
result['microsecond'] = obj.microsecond
return result # depends on [control=['if'], data=[]]
if isinstance(obj, StreamingBody):
result['body'] = obj.read()
obj._raw_stream = StringIO(result['body'])
obj._amount_read = 0
return result # depends on [control=['if'], data=[]]
# Raise a TypeError if the object isn't recognized
raise TypeError('Type not serializable') |
def to_signed(cls, type_):
""" Return signed type or equivalent
"""
if type_ in cls.unsigned:
return {TYPE.ubyte: TYPE.byte_,
TYPE.uinteger: TYPE.integer,
TYPE.ulong: TYPE.long_}[type_]
if type_ in cls.decimals or type_ in cls.signed:
return type_
return cls.unknown | def function[to_signed, parameter[cls, type_]]:
constant[ Return signed type or equivalent
]
if compare[name[type_] in name[cls].unsigned] begin[:]
return[call[dictionary[[<ast.Attribute object at 0x7da18f58ea40>, <ast.Attribute object at 0x7da18f58d6c0>, <ast.Attribute object at 0x7da18f58d030>], [<ast.Attribute object at 0x7da18f58fa90>, <ast.Attribute object at 0x7da1b06fd900>, <ast.Attribute object at 0x7da1b06fd8a0>]]][name[type_]]]
if <ast.BoolOp object at 0x7da1b06fd960> begin[:]
return[name[type_]]
return[name[cls].unknown] | keyword[def] identifier[to_signed] ( identifier[cls] , identifier[type_] ):
literal[string]
keyword[if] identifier[type_] keyword[in] identifier[cls] . identifier[unsigned] :
keyword[return] { identifier[TYPE] . identifier[ubyte] : identifier[TYPE] . identifier[byte_] ,
identifier[TYPE] . identifier[uinteger] : identifier[TYPE] . identifier[integer] ,
identifier[TYPE] . identifier[ulong] : identifier[TYPE] . identifier[long_] }[ identifier[type_] ]
keyword[if] identifier[type_] keyword[in] identifier[cls] . identifier[decimals] keyword[or] identifier[type_] keyword[in] identifier[cls] . identifier[signed] :
keyword[return] identifier[type_]
keyword[return] identifier[cls] . identifier[unknown] | def to_signed(cls, type_):
""" Return signed type or equivalent
"""
if type_ in cls.unsigned:
return {TYPE.ubyte: TYPE.byte_, TYPE.uinteger: TYPE.integer, TYPE.ulong: TYPE.long_}[type_] # depends on [control=['if'], data=['type_']]
if type_ in cls.decimals or type_ in cls.signed:
return type_ # depends on [control=['if'], data=[]]
return cls.unknown |
def c_transform_entropic(b, M, reg, beta):
'''
The goal is to recover u from the c-transform.
The function computes the c_transform of a dual variable from the other
dual variable:
.. math::
u = v^{c,reg} = -reg \sum_j exp((v - M)/reg) b_j
Where :
- M is the (ns,nt) metric cost matrix
- u, v are dual variables in R^IxR^J
- reg is the regularization term
It is used to recover an optimal u from optimal v solving the semi dual
problem, see Proposition 2.1 of [18]_
Parameters
----------
b : np.ndarray(nt,)
target measure
M : np.ndarray(ns, nt)
cost matrix
reg : float
regularization term > 0
v : np.ndarray(nt,)
dual variable
Returns
-------
u : np.ndarray(ns,)
dual variable
Examples
--------
>>> n_source = 7
>>> n_target = 4
>>> reg = 1
>>> numItermax = 300000
>>> a = ot.utils.unif(n_source)
>>> b = ot.utils.unif(n_target)
>>> rng = np.random.RandomState(0)
>>> X_source = rng.randn(n_source, 2)
>>> Y_target = rng.randn(n_target, 2)
>>> M = ot.dist(X_source, Y_target)
>>> method = "ASGD"
>>> asgd_pi = stochastic.solve_semi_dual_entropic(a, b, M, reg,
method, numItermax)
>>> print(asgd_pi)
References
----------
[Genevay et al., 2016] :
Stochastic Optimization for Large-scale Optimal Transport,
Advances in Neural Information Processing Systems (2016),
arXiv preprint arxiv:1605.08527.
'''
n_source = np.shape(M)[0]
alpha = np.zeros(n_source)
for i in range(n_source):
r = M[i, :] - beta
min_r = np.min(r)
exp_beta = np.exp(-(r - min_r) / reg) * b
alpha[i] = min_r - reg * np.log(np.sum(exp_beta))
return alpha | def function[c_transform_entropic, parameter[b, M, reg, beta]]:
constant[
The goal is to recover u from the c-transform.
The function computes the c_transform of a dual variable from the other
dual variable:
.. math::
u = v^{c,reg} = -reg \sum_j exp((v - M)/reg) b_j
Where :
- M is the (ns,nt) metric cost matrix
- u, v are dual variables in R^IxR^J
- reg is the regularization term
It is used to recover an optimal u from optimal v solving the semi dual
problem, see Proposition 2.1 of [18]_
Parameters
----------
b : np.ndarray(nt,)
target measure
M : np.ndarray(ns, nt)
cost matrix
reg : float
regularization term > 0
v : np.ndarray(nt,)
dual variable
Returns
-------
u : np.ndarray(ns,)
dual variable
Examples
--------
>>> n_source = 7
>>> n_target = 4
>>> reg = 1
>>> numItermax = 300000
>>> a = ot.utils.unif(n_source)
>>> b = ot.utils.unif(n_target)
>>> rng = np.random.RandomState(0)
>>> X_source = rng.randn(n_source, 2)
>>> Y_target = rng.randn(n_target, 2)
>>> M = ot.dist(X_source, Y_target)
>>> method = "ASGD"
>>> asgd_pi = stochastic.solve_semi_dual_entropic(a, b, M, reg,
method, numItermax)
>>> print(asgd_pi)
References
----------
[Genevay et al., 2016] :
Stochastic Optimization for Large-scale Optimal Transport,
Advances in Neural Information Processing Systems (2016),
arXiv preprint arxiv:1605.08527.
]
variable[n_source] assign[=] call[call[name[np].shape, parameter[name[M]]]][constant[0]]
variable[alpha] assign[=] call[name[np].zeros, parameter[name[n_source]]]
for taget[name[i]] in starred[call[name[range], parameter[name[n_source]]]] begin[:]
variable[r] assign[=] binary_operation[call[name[M]][tuple[[<ast.Name object at 0x7da1b1638b80>, <ast.Slice object at 0x7da1b163a7d0>]]] - name[beta]]
variable[min_r] assign[=] call[name[np].min, parameter[name[r]]]
variable[exp_beta] assign[=] binary_operation[call[name[np].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b18dfca0> / name[reg]]]] * name[b]]
call[name[alpha]][name[i]] assign[=] binary_operation[name[min_r] - binary_operation[name[reg] * call[name[np].log, parameter[call[name[np].sum, parameter[name[exp_beta]]]]]]]
return[name[alpha]] | keyword[def] identifier[c_transform_entropic] ( identifier[b] , identifier[M] , identifier[reg] , identifier[beta] ):
literal[string]
identifier[n_source] = identifier[np] . identifier[shape] ( identifier[M] )[ literal[int] ]
identifier[alpha] = identifier[np] . identifier[zeros] ( identifier[n_source] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_source] ):
identifier[r] = identifier[M] [ identifier[i] ,:]- identifier[beta]
identifier[min_r] = identifier[np] . identifier[min] ( identifier[r] )
identifier[exp_beta] = identifier[np] . identifier[exp] (-( identifier[r] - identifier[min_r] )/ identifier[reg] )* identifier[b]
identifier[alpha] [ identifier[i] ]= identifier[min_r] - identifier[reg] * identifier[np] . identifier[log] ( identifier[np] . identifier[sum] ( identifier[exp_beta] ))
keyword[return] identifier[alpha] | def c_transform_entropic(b, M, reg, beta):
"""
The goal is to recover u from the c-transform.
The function computes the c_transform of a dual variable from the other
dual variable:
.. math::
u = v^{c,reg} = -reg \\sum_j exp((v - M)/reg) b_j
Where :
- M is the (ns,nt) metric cost matrix
- u, v are dual variables in R^IxR^J
- reg is the regularization term
It is used to recover an optimal u from optimal v solving the semi dual
problem, see Proposition 2.1 of [18]_
Parameters
----------
b : np.ndarray(nt,)
target measure
M : np.ndarray(ns, nt)
cost matrix
reg : float
regularization term > 0
v : np.ndarray(nt,)
dual variable
Returns
-------
u : np.ndarray(ns,)
dual variable
Examples
--------
>>> n_source = 7
>>> n_target = 4
>>> reg = 1
>>> numItermax = 300000
>>> a = ot.utils.unif(n_source)
>>> b = ot.utils.unif(n_target)
>>> rng = np.random.RandomState(0)
>>> X_source = rng.randn(n_source, 2)
>>> Y_target = rng.randn(n_target, 2)
>>> M = ot.dist(X_source, Y_target)
>>> method = "ASGD"
>>> asgd_pi = stochastic.solve_semi_dual_entropic(a, b, M, reg,
method, numItermax)
>>> print(asgd_pi)
References
----------
[Genevay et al., 2016] :
Stochastic Optimization for Large-scale Optimal Transport,
Advances in Neural Information Processing Systems (2016),
arXiv preprint arxiv:1605.08527.
"""
n_source = np.shape(M)[0]
alpha = np.zeros(n_source)
for i in range(n_source):
r = M[i, :] - beta
min_r = np.min(r)
exp_beta = np.exp(-(r - min_r) / reg) * b
alpha[i] = min_r - reg * np.log(np.sum(exp_beta)) # depends on [control=['for'], data=['i']]
return alpha |
def get(self, twig=None, check_visible=True, check_default=True, **kwargs):
"""
Get a single parameter from this ParameterSet. This works exactly the
same as filter except there must be only a single result, and the Parameter
itself is returned instead of a ParameterSet.
Also see :meth:`get_parameter` (which is simply an alias of this method)
:parameter str twig: (optional) the search twig - essentially a single
string with any delimiter (ie '@') that will be parsed
into any of the meta-tags. Example: instead of
b.filter(context='component', component='starA'), you
could do b.filter('starA@component').
:parameter bool check_visible: whether to hide invisible
parameters. These are usually parameters that do not
play a role unless the value of another parameter meets
some condition.
:parameter bool check_default: whether to exclude parameters which
have a _default tag (these are parameters which solely exist
to provide defaults for when new parameters or datasets are
added and the parameter needs to be copied appropriately).
Defaults to True.
:parameter **kwargs: meta-tags to search (ie. 'context', 'component',
'model', etc). See :func:`meta` for all possible options.
:return: the resulting :class:`Parameter`
:raises ValueError: if either 0 or more than 1 results are found
matching the search.
"""
kwargs['check_visible'] = check_visible
kwargs['check_default'] = check_default
# print "***", kwargs
ps = self.filter(twig=twig, **kwargs)
if not len(ps):
# TODO: custom exception?
raise ValueError("0 results found")
elif len(ps) != 1:
# TODO: custom exception?
raise ValueError("{} results found: {}".format(len(ps), ps.twigs))
else:
# then only 1 item, so return the parameter
return ps._params[0] | def function[get, parameter[self, twig, check_visible, check_default]]:
constant[
Get a single parameter from this ParameterSet. This works exactly the
same as filter except there must be only a single result, and the Parameter
itself is returned instead of a ParameterSet.
Also see :meth:`get_parameter` (which is simply an alias of this method)
:parameter str twig: (optional) the search twig - essentially a single
string with any delimiter (ie '@') that will be parsed
into any of the meta-tags. Example: instead of
b.filter(context='component', component='starA'), you
could do b.filter('starA@component').
:parameter bool check_visible: whether to hide invisible
parameters. These are usually parameters that do not
play a role unless the value of another parameter meets
some condition.
:parameter bool check_default: whether to exclude parameters which
have a _default tag (these are parameters which solely exist
to provide defaults for when new parameters or datasets are
added and the parameter needs to be copied appropriately).
Defaults to True.
:parameter **kwargs: meta-tags to search (ie. 'context', 'component',
'model', etc). See :func:`meta` for all possible options.
:return: the resulting :class:`Parameter`
:raises ValueError: if either 0 or more than 1 results are found
matching the search.
]
call[name[kwargs]][constant[check_visible]] assign[=] name[check_visible]
call[name[kwargs]][constant[check_default]] assign[=] name[check_default]
variable[ps] assign[=] call[name[self].filter, parameter[]]
if <ast.UnaryOp object at 0x7da18ede43d0> begin[:]
<ast.Raise object at 0x7da18ede6140> | keyword[def] identifier[get] ( identifier[self] , identifier[twig] = keyword[None] , identifier[check_visible] = keyword[True] , identifier[check_default] = keyword[True] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= identifier[check_visible]
identifier[kwargs] [ literal[string] ]= identifier[check_default]
identifier[ps] = identifier[self] . identifier[filter] ( identifier[twig] = identifier[twig] ,** identifier[kwargs] )
keyword[if] keyword[not] identifier[len] ( identifier[ps] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[elif] identifier[len] ( identifier[ps] )!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[len] ( identifier[ps] ), identifier[ps] . identifier[twigs] ))
keyword[else] :
keyword[return] identifier[ps] . identifier[_params] [ literal[int] ] | def get(self, twig=None, check_visible=True, check_default=True, **kwargs):
"""
Get a single parameter from this ParameterSet. This works exactly the
same as filter except there must be only a single result, and the Parameter
itself is returned instead of a ParameterSet.
Also see :meth:`get_parameter` (which is simply an alias of this method)
:parameter str twig: (optional) the search twig - essentially a single
string with any delimiter (ie '@') that will be parsed
into any of the meta-tags. Example: instead of
b.filter(context='component', component='starA'), you
could do b.filter('starA@component').
:parameter bool check_visible: whether to hide invisible
parameters. These are usually parameters that do not
play a role unless the value of another parameter meets
some condition.
:parameter bool check_default: whether to exclude parameters which
have a _default tag (these are parameters which solely exist
to provide defaults for when new parameters or datasets are
added and the parameter needs to be copied appropriately).
Defaults to True.
:parameter **kwargs: meta-tags to search (ie. 'context', 'component',
'model', etc). See :func:`meta` for all possible options.
:return: the resulting :class:`Parameter`
:raises ValueError: if either 0 or more than 1 results are found
matching the search.
"""
kwargs['check_visible'] = check_visible
kwargs['check_default'] = check_default
# print "***", kwargs
ps = self.filter(twig=twig, **kwargs)
if not len(ps):
# TODO: custom exception?
raise ValueError('0 results found') # depends on [control=['if'], data=[]]
elif len(ps) != 1:
# TODO: custom exception?
raise ValueError('{} results found: {}'.format(len(ps), ps.twigs)) # depends on [control=['if'], data=[]]
else:
# then only 1 item, so return the parameter
return ps._params[0] |
def get_comics(self, *args, **kwargs):
"""
Returns a full ComicDataWrapper object for this creator.
/creators/{creatorId}/comics
:returns: ComicDataWrapper -- A new request to API. Contains full results set.
"""
from .comic import Comic, ComicDataWrapper
return self.get_related_resource(Comic, ComicDataWrapper, args, kwargs) | def function[get_comics, parameter[self]]:
constant[
Returns a full ComicDataWrapper object for this creator.
/creators/{creatorId}/comics
:returns: ComicDataWrapper -- A new request to API. Contains full results set.
]
from relative_module[comic] import module[Comic], module[ComicDataWrapper]
return[call[name[self].get_related_resource, parameter[name[Comic], name[ComicDataWrapper], name[args], name[kwargs]]]] | keyword[def] identifier[get_comics] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[from] . identifier[comic] keyword[import] identifier[Comic] , identifier[ComicDataWrapper]
keyword[return] identifier[self] . identifier[get_related_resource] ( identifier[Comic] , identifier[ComicDataWrapper] , identifier[args] , identifier[kwargs] ) | def get_comics(self, *args, **kwargs):
"""
Returns a full ComicDataWrapper object for this creator.
/creators/{creatorId}/comics
:returns: ComicDataWrapper -- A new request to API. Contains full results set.
"""
from .comic import Comic, ComicDataWrapper
return self.get_related_resource(Comic, ComicDataWrapper, args, kwargs) |
def user_lookup(self, cloudflare_email=None, unique_id=None):
"""
Lookup user data based on either his cloudflare_email or his
unique_id.
:param cloudflare_email: email associated with user
:type cloudflare_email: str
:param unique_id: unique id associated with user
:type unique_id: str
:returns:
:rtype: dict
"""
if not cloudflare_email and not unique_id:
raise KeyError(
'Either cloudflare_email or unique_id must be present')
params = {'act': 'user_lookup'}
if cloudflare_email:
params['cloudflare_email'] = cloudflare_email
else:
params['unique_id'] = unique_id
return self._request(params) | def function[user_lookup, parameter[self, cloudflare_email, unique_id]]:
constant[
Lookup user data based on either his cloudflare_email or his
unique_id.
:param cloudflare_email: email associated with user
:type cloudflare_email: str
:param unique_id: unique id associated with user
:type unique_id: str
:returns:
:rtype: dict
]
if <ast.BoolOp object at 0x7da20cabc640> begin[:]
<ast.Raise object at 0x7da20cabe170>
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20cabcb20>], [<ast.Constant object at 0x7da20cabe380>]]
if name[cloudflare_email] begin[:]
call[name[params]][constant[cloudflare_email]] assign[=] name[cloudflare_email]
return[call[name[self]._request, parameter[name[params]]]] | keyword[def] identifier[user_lookup] ( identifier[self] , identifier[cloudflare_email] = keyword[None] , identifier[unique_id] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[cloudflare_email] keyword[and] keyword[not] identifier[unique_id] :
keyword[raise] identifier[KeyError] (
literal[string] )
identifier[params] ={ literal[string] : literal[string] }
keyword[if] identifier[cloudflare_email] :
identifier[params] [ literal[string] ]= identifier[cloudflare_email]
keyword[else] :
identifier[params] [ literal[string] ]= identifier[unique_id]
keyword[return] identifier[self] . identifier[_request] ( identifier[params] ) | def user_lookup(self, cloudflare_email=None, unique_id=None):
"""
Lookup user data based on either his cloudflare_email or his
unique_id.
:param cloudflare_email: email associated with user
:type cloudflare_email: str
:param unique_id: unique id associated with user
:type unique_id: str
:returns:
:rtype: dict
"""
if not cloudflare_email and (not unique_id):
raise KeyError('Either cloudflare_email or unique_id must be present') # depends on [control=['if'], data=[]]
params = {'act': 'user_lookup'}
if cloudflare_email:
params['cloudflare_email'] = cloudflare_email # depends on [control=['if'], data=[]]
else:
params['unique_id'] = unique_id
return self._request(params) |
def infer_columns(selectx,tables_dict):
"""infer the columns for a subselect that creates an implicit table.
the output of this *can* contain duplicate names, fingers crossed that downstream code uses the first.
(Look up SQL spec on dupe names.)
todo(refactor): I think there's common logic here and inside NameIndexer that can be merged.
todo(ugly): this is a beast
"""
# todo: support CTEs -- with all this plumbing, might as well
table2fields = {}
table_order = []
for t in selectx.tables:
if isinstance(t,basestring):
table2fields[t]=tables_dict[t].fields
table_order.append(t)
elif isinstance(t,sqparse2.AliasX):
if isinstance(t.name,basestring):
table2fields[t]=tables_dict[t]
table_order.append(t.name)
elif isinstance(t.name,sqparse2.SelectX): raise NotImplementedError('todo: inner subquery')
else: raise TypeError('AliasX.name',type(t.name))
else: raise TypeError('table',type(t))
# the forms are: *, x.*, x.y, y. expressions are anonymous unless they have an 'as' (which I don't support)
table_order=uniqify(table_order)
cols=[]
used_name_collision = collections.Counter()
for col in selectx.cols.children:
if isinstance(col,sqparse2.AsterX):
for t in table_order:
cols.extend(table2fields[t])
elif isinstance(col,sqparse2.BaseX):
all_paths = treepath.sub_slots(col, lambda x:isinstance(x,(sqparse2.AttrX,sqparse2.NameX,sqparse2.AliasX)), match=True)
paths = eliminate_sequential_children(all_paths) # this eliminates NameX under AttrX
for p in paths:
x = col[p]
if isinstance(x,sqparse2.AttrX):
if not isinstance(x.parent,sqparse2.NameX): raise TypeError('parent_not_name',type(x.parent))
if isinstance(x.attr,sqparse2.NameX): raise NotImplementedError # todo
elif isinstance(x.attr,sqparse2.AsterX): cols.extend(table2fields[x.parent.name])
else: raise TypeError('attr_unk_type',type(x.attr))
elif isinstance(x,sqparse2.NameX):
matching_fields = filter(None,(next((f for f in table2fields[t] if f.name==x.name),None) for t in table_order))
if len(matching_fields)!=1: raise sqparse2.SQLSyntaxError('missing_or_dupe_field',x,matching_fields)
cols.append(matching_fields[0])
elif isinstance(x,sqparse2.AliasX): cols.append(sqparse2.ColX(x.alias,None,None,None,None,None))
else: raise TypeError('unk_item_type',type(x)) # pragma: no cover
else: raise TypeError('unk_col_type',type(col)) # pragma: no cover
return cols | def function[infer_columns, parameter[selectx, tables_dict]]:
constant[infer the columns for a subselect that creates an implicit table.
the output of this *can* contain duplicate names, fingers crossed that downstream code uses the first.
(Look up SQL spec on dupe names.)
todo(refactor): I think there's common logic here and inside NameIndexer that can be merged.
todo(ugly): this is a beast
]
variable[table2fields] assign[=] dictionary[[], []]
variable[table_order] assign[=] list[[]]
for taget[name[t]] in starred[name[selectx].tables] begin[:]
if call[name[isinstance], parameter[name[t], name[basestring]]] begin[:]
call[name[table2fields]][name[t]] assign[=] call[name[tables_dict]][name[t]].fields
call[name[table_order].append, parameter[name[t]]]
variable[table_order] assign[=] call[name[uniqify], parameter[name[table_order]]]
variable[cols] assign[=] list[[]]
variable[used_name_collision] assign[=] call[name[collections].Counter, parameter[]]
for taget[name[col]] in starred[name[selectx].cols.children] begin[:]
if call[name[isinstance], parameter[name[col], name[sqparse2].AsterX]] begin[:]
for taget[name[t]] in starred[name[table_order]] begin[:]
call[name[cols].extend, parameter[call[name[table2fields]][name[t]]]]
return[name[cols]] | keyword[def] identifier[infer_columns] ( identifier[selectx] , identifier[tables_dict] ):
literal[string]
identifier[table2fields] ={}
identifier[table_order] =[]
keyword[for] identifier[t] keyword[in] identifier[selectx] . identifier[tables] :
keyword[if] identifier[isinstance] ( identifier[t] , identifier[basestring] ):
identifier[table2fields] [ identifier[t] ]= identifier[tables_dict] [ identifier[t] ]. identifier[fields]
identifier[table_order] . identifier[append] ( identifier[t] )
keyword[elif] identifier[isinstance] ( identifier[t] , identifier[sqparse2] . identifier[AliasX] ):
keyword[if] identifier[isinstance] ( identifier[t] . identifier[name] , identifier[basestring] ):
identifier[table2fields] [ identifier[t] ]= identifier[tables_dict] [ identifier[t] ]
identifier[table_order] . identifier[append] ( identifier[t] . identifier[name] )
keyword[elif] identifier[isinstance] ( identifier[t] . identifier[name] , identifier[sqparse2] . identifier[SelectX] ): keyword[raise] identifier[NotImplementedError] ( literal[string] )
keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] , identifier[type] ( identifier[t] . identifier[name] ))
keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] , identifier[type] ( identifier[t] ))
identifier[table_order] = identifier[uniqify] ( identifier[table_order] )
identifier[cols] =[]
identifier[used_name_collision] = identifier[collections] . identifier[Counter] ()
keyword[for] identifier[col] keyword[in] identifier[selectx] . identifier[cols] . identifier[children] :
keyword[if] identifier[isinstance] ( identifier[col] , identifier[sqparse2] . identifier[AsterX] ):
keyword[for] identifier[t] keyword[in] identifier[table_order] :
identifier[cols] . identifier[extend] ( identifier[table2fields] [ identifier[t] ])
keyword[elif] identifier[isinstance] ( identifier[col] , identifier[sqparse2] . identifier[BaseX] ):
identifier[all_paths] = identifier[treepath] . identifier[sub_slots] ( identifier[col] , keyword[lambda] identifier[x] : identifier[isinstance] ( identifier[x] ,( identifier[sqparse2] . identifier[AttrX] , identifier[sqparse2] . identifier[NameX] , identifier[sqparse2] . identifier[AliasX] )), identifier[match] = keyword[True] )
identifier[paths] = identifier[eliminate_sequential_children] ( identifier[all_paths] )
keyword[for] identifier[p] keyword[in] identifier[paths] :
identifier[x] = identifier[col] [ identifier[p] ]
keyword[if] identifier[isinstance] ( identifier[x] , identifier[sqparse2] . identifier[AttrX] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[x] . identifier[parent] , identifier[sqparse2] . identifier[NameX] ): keyword[raise] identifier[TypeError] ( literal[string] , identifier[type] ( identifier[x] . identifier[parent] ))
keyword[if] identifier[isinstance] ( identifier[x] . identifier[attr] , identifier[sqparse2] . identifier[NameX] ): keyword[raise] identifier[NotImplementedError]
keyword[elif] identifier[isinstance] ( identifier[x] . identifier[attr] , identifier[sqparse2] . identifier[AsterX] ): identifier[cols] . identifier[extend] ( identifier[table2fields] [ identifier[x] . identifier[parent] . identifier[name] ])
keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] , identifier[type] ( identifier[x] . identifier[attr] ))
keyword[elif] identifier[isinstance] ( identifier[x] , identifier[sqparse2] . identifier[NameX] ):
identifier[matching_fields] = identifier[filter] ( keyword[None] ,( identifier[next] (( identifier[f] keyword[for] identifier[f] keyword[in] identifier[table2fields] [ identifier[t] ] keyword[if] identifier[f] . identifier[name] == identifier[x] . identifier[name] ), keyword[None] ) keyword[for] identifier[t] keyword[in] identifier[table_order] ))
keyword[if] identifier[len] ( identifier[matching_fields] )!= literal[int] : keyword[raise] identifier[sqparse2] . identifier[SQLSyntaxError] ( literal[string] , identifier[x] , identifier[matching_fields] )
identifier[cols] . identifier[append] ( identifier[matching_fields] [ literal[int] ])
keyword[elif] identifier[isinstance] ( identifier[x] , identifier[sqparse2] . identifier[AliasX] ): identifier[cols] . identifier[append] ( identifier[sqparse2] . identifier[ColX] ( identifier[x] . identifier[alias] , keyword[None] , keyword[None] , keyword[None] , keyword[None] , keyword[None] ))
keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] , identifier[type] ( identifier[x] ))
keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] , identifier[type] ( identifier[col] ))
keyword[return] identifier[cols] | def infer_columns(selectx, tables_dict):
"""infer the columns for a subselect that creates an implicit table.
the output of this *can* contain duplicate names, fingers crossed that downstream code uses the first.
(Look up SQL spec on dupe names.)
todo(refactor): I think there's common logic here and inside NameIndexer that can be merged.
todo(ugly): this is a beast
"""
# todo: support CTEs -- with all this plumbing, might as well
table2fields = {}
table_order = []
for t in selectx.tables:
if isinstance(t, basestring):
table2fields[t] = tables_dict[t].fields
table_order.append(t) # depends on [control=['if'], data=[]]
elif isinstance(t, sqparse2.AliasX):
if isinstance(t.name, basestring):
table2fields[t] = tables_dict[t]
table_order.append(t.name) # depends on [control=['if'], data=[]]
elif isinstance(t.name, sqparse2.SelectX):
raise NotImplementedError('todo: inner subquery') # depends on [control=['if'], data=[]]
else:
raise TypeError('AliasX.name', type(t.name)) # depends on [control=['if'], data=[]]
else:
raise TypeError('table', type(t)) # depends on [control=['for'], data=['t']]
# the forms are: *, x.*, x.y, y. expressions are anonymous unless they have an 'as' (which I don't support)
table_order = uniqify(table_order)
cols = []
used_name_collision = collections.Counter()
for col in selectx.cols.children:
if isinstance(col, sqparse2.AsterX):
for t in table_order:
cols.extend(table2fields[t]) # depends on [control=['for'], data=['t']] # depends on [control=['if'], data=[]]
elif isinstance(col, sqparse2.BaseX):
all_paths = treepath.sub_slots(col, lambda x: isinstance(x, (sqparse2.AttrX, sqparse2.NameX, sqparse2.AliasX)), match=True)
paths = eliminate_sequential_children(all_paths) # this eliminates NameX under AttrX
for p in paths:
x = col[p]
if isinstance(x, sqparse2.AttrX):
if not isinstance(x.parent, sqparse2.NameX):
raise TypeError('parent_not_name', type(x.parent)) # depends on [control=['if'], data=[]]
if isinstance(x.attr, sqparse2.NameX):
raise NotImplementedError # todo # depends on [control=['if'], data=[]]
elif isinstance(x.attr, sqparse2.AsterX):
cols.extend(table2fields[x.parent.name]) # depends on [control=['if'], data=[]]
else:
raise TypeError('attr_unk_type', type(x.attr)) # depends on [control=['if'], data=[]]
elif isinstance(x, sqparse2.NameX):
matching_fields = filter(None, (next((f for f in table2fields[t] if f.name == x.name), None) for t in table_order))
if len(matching_fields) != 1:
raise sqparse2.SQLSyntaxError('missing_or_dupe_field', x, matching_fields) # depends on [control=['if'], data=[]]
cols.append(matching_fields[0]) # depends on [control=['if'], data=[]]
elif isinstance(x, sqparse2.AliasX):
cols.append(sqparse2.ColX(x.alias, None, None, None, None, None)) # depends on [control=['if'], data=[]]
else:
raise TypeError('unk_item_type', type(x)) # pragma: no cover # depends on [control=['for'], data=['p']] # depends on [control=['if'], data=[]]
else:
raise TypeError('unk_col_type', type(col)) # pragma: no cover # depends on [control=['for'], data=['col']]
return cols |
def upload_file(self, project_name, local_path, remote_path=None):
"""
Upload a file into project creating a new version if it already exists.
Will also create project and parent folders if they do not exist.
:param project_name: str: name of the project to upload a file to
:param local_path: str: path to download the file into
:param remote_path: str: remote path specifying file to upload to (defaults to local_path basename)
"""
project = self._get_or_create_project(project_name)
file_upload = FileUpload(project, remote_path, local_path)
file_upload.run() | def function[upload_file, parameter[self, project_name, local_path, remote_path]]:
constant[
Upload a file into project creating a new version if it already exists.
Will also create project and parent folders if they do not exist.
:param project_name: str: name of the project to upload a file to
:param local_path: str: path to download the file into
:param remote_path: str: remote path specifying file to upload to (defaults to local_path basename)
]
variable[project] assign[=] call[name[self]._get_or_create_project, parameter[name[project_name]]]
variable[file_upload] assign[=] call[name[FileUpload], parameter[name[project], name[remote_path], name[local_path]]]
call[name[file_upload].run, parameter[]] | keyword[def] identifier[upload_file] ( identifier[self] , identifier[project_name] , identifier[local_path] , identifier[remote_path] = keyword[None] ):
literal[string]
identifier[project] = identifier[self] . identifier[_get_or_create_project] ( identifier[project_name] )
identifier[file_upload] = identifier[FileUpload] ( identifier[project] , identifier[remote_path] , identifier[local_path] )
identifier[file_upload] . identifier[run] () | def upload_file(self, project_name, local_path, remote_path=None):
"""
Upload a file into project creating a new version if it already exists.
Will also create project and parent folders if they do not exist.
:param project_name: str: name of the project to upload a file to
:param local_path: str: path to download the file into
:param remote_path: str: remote path specifying file to upload to (defaults to local_path basename)
"""
project = self._get_or_create_project(project_name)
file_upload = FileUpload(project, remote_path, local_path)
file_upload.run() |
def log_to_DataFrame(execution_history_items, data_in_columns=[], data_out_columns=[], scoped_in_columns=[],
scoped_out_columns=[], semantic_data_columns=[], throw_on_pickle_error=True):
"""
Returns all collapsed items in a table-like structure (pandas.DataFrame) with one row per executed
state and a set of properties resp. columns (e.g. state_name, outcome, run_id) for this state.
The data flow (data_in/out, scoped_data_in/out, semantic_data) is omitted from this table
representation by default, as the different states have different data in-/out-port, scoped_data-
ports and semantic_data defined. However, you can ask specific data-/scoped_data-ports and semantic
data to be exported as table column, given they are primitive-valued, by including the port / key
names in the *_selected-parameters. These table-columns will obviously only be well-defined for
states having this kind of port-name-/semantic-key and otherwise will contain a None-like value,
indicating missing data.
The available data per execution item (row in the table) can be printed using pandas.DataFrame.columns.
"""
try:
import pandas as pd
except ImportError:
raise ImportError("The Python package 'pandas' is required for log_to_DataFrame.")
start, next_, concurrency, hierarchy, gitems = log_to_collapsed_structure(
execution_history_items, throw_on_pickle_error=throw_on_pickle_error)
gitems.pop(start['run_id'])
if len(gitems) == 0:
return pd.DataFrame()
# remove columns which are not generic over all states (basically the
# data flow stuff)
df_keys = list(list(gitems.values())[0].keys())
df_keys.remove('data_ins')
df_keys.remove('data_outs')
df_keys.remove('scoped_data_ins')
df_keys.remove('scoped_data_outs')
df_keys.remove('semantic_data')
df_keys.sort()
df_items = []
for rid, item in gitems.items():
row_data = [item[k] for k in df_keys]
for key, selected_columns in [('data_ins', data_in_columns),
('data_outs', data_out_columns),
('scoped_data_ins', scoped_in_columns),
('scoped_data_outs', scoped_out_columns),
('semantic_data', semantic_data_columns)]:
for column_key in selected_columns:
row_data.append(item[key].get(column_key, None))
df_items.append(row_data)
for key, selected_columns in [('data_ins', data_in_columns),
('data_outs', data_out_columns),
('scoped_data_ins', scoped_in_columns),
('scoped_data_outs', scoped_out_columns),
('semantic_data', semantic_data_columns)]:
df_keys.extend([key + '__' + s for s in selected_columns])
df = pd.DataFrame(df_items, columns=df_keys)
# convert epoch to datetime
df.timestamp_call = pd.to_datetime(df.timestamp_call, unit='s')
df.timestamp_return = pd.to_datetime(df.timestamp_return, unit='s')
# use call timestamp as index
df_timed = df.set_index(df.timestamp_call)
df_timed.sort_index(inplace=True)
return df_timed | def function[log_to_DataFrame, parameter[execution_history_items, data_in_columns, data_out_columns, scoped_in_columns, scoped_out_columns, semantic_data_columns, throw_on_pickle_error]]:
constant[
Returns all collapsed items in a table-like structure (pandas.DataFrame) with one row per executed
state and a set of properties resp. columns (e.g. state_name, outcome, run_id) for this state.
The data flow (data_in/out, scoped_data_in/out, semantic_data) is omitted from this table
representation by default, as the different states have different data in-/out-port, scoped_data-
ports and semantic_data defined. However, you can ask specific data-/scoped_data-ports and semantic
data to be exported as table column, given they are primitive-valued, by including the port / key
names in the *_selected-parameters. These table-columns will obviously only be well-defined for
states having this kind of port-name-/semantic-key and otherwise will contain a None-like value,
indicating missing data.
The available data per execution item (row in the table) can be printed using pandas.DataFrame.columns.
]
<ast.Try object at 0x7da1b192fb50>
<ast.Tuple object at 0x7da1b192d2a0> assign[=] call[name[log_to_collapsed_structure], parameter[name[execution_history_items]]]
call[name[gitems].pop, parameter[call[name[start]][constant[run_id]]]]
if compare[call[name[len], parameter[name[gitems]]] equal[==] constant[0]] begin[:]
return[call[name[pd].DataFrame, parameter[]]]
variable[df_keys] assign[=] call[name[list], parameter[call[call[call[name[list], parameter[call[name[gitems].values, parameter[]]]]][constant[0]].keys, parameter[]]]]
call[name[df_keys].remove, parameter[constant[data_ins]]]
call[name[df_keys].remove, parameter[constant[data_outs]]]
call[name[df_keys].remove, parameter[constant[scoped_data_ins]]]
call[name[df_keys].remove, parameter[constant[scoped_data_outs]]]
call[name[df_keys].remove, parameter[constant[semantic_data]]]
call[name[df_keys].sort, parameter[]]
variable[df_items] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1a28d30>, <ast.Name object at 0x7da1b1a2a290>]]] in starred[call[name[gitems].items, parameter[]]] begin[:]
variable[row_data] assign[=] <ast.ListComp object at 0x7da1b1a2ada0>
for taget[tuple[[<ast.Name object at 0x7da1b1a2b1f0>, <ast.Name object at 0x7da1b1a2a7a0>]]] in starred[list[[<ast.Tuple object at 0x7da1b1a2ab60>, <ast.Tuple object at 0x7da1b1a2a350>, <ast.Tuple object at 0x7da1b1a2bac0>, <ast.Tuple object at 0x7da1b1a28e50>, <ast.Tuple object at 0x7da1b1a29e40>]]] begin[:]
for taget[name[column_key]] in starred[name[selected_columns]] begin[:]
call[name[row_data].append, parameter[call[call[name[item]][name[key]].get, parameter[name[column_key], constant[None]]]]]
call[name[df_items].append, parameter[name[row_data]]]
for taget[tuple[[<ast.Name object at 0x7da1b1a2b310>, <ast.Name object at 0x7da1b1a29480>]]] in starred[list[[<ast.Tuple object at 0x7da1b1a298a0>, <ast.Tuple object at 0x7da1b1a2a020>, <ast.Tuple object at 0x7da1b1a29750>, <ast.Tuple object at 0x7da1b1a28100>, <ast.Tuple object at 0x7da1b1a2ba30>]]] begin[:]
call[name[df_keys].extend, parameter[<ast.ListComp object at 0x7da2044c2830>]]
variable[df] assign[=] call[name[pd].DataFrame, parameter[name[df_items]]]
name[df].timestamp_call assign[=] call[name[pd].to_datetime, parameter[name[df].timestamp_call]]
name[df].timestamp_return assign[=] call[name[pd].to_datetime, parameter[name[df].timestamp_return]]
variable[df_timed] assign[=] call[name[df].set_index, parameter[name[df].timestamp_call]]
call[name[df_timed].sort_index, parameter[]]
return[name[df_timed]] | keyword[def] identifier[log_to_DataFrame] ( identifier[execution_history_items] , identifier[data_in_columns] =[], identifier[data_out_columns] =[], identifier[scoped_in_columns] =[],
identifier[scoped_out_columns] =[], identifier[semantic_data_columns] =[], identifier[throw_on_pickle_error] = keyword[True] ):
literal[string]
keyword[try] :
keyword[import] identifier[pandas] keyword[as] identifier[pd]
keyword[except] identifier[ImportError] :
keyword[raise] identifier[ImportError] ( literal[string] )
identifier[start] , identifier[next_] , identifier[concurrency] , identifier[hierarchy] , identifier[gitems] = identifier[log_to_collapsed_structure] (
identifier[execution_history_items] , identifier[throw_on_pickle_error] = identifier[throw_on_pickle_error] )
identifier[gitems] . identifier[pop] ( identifier[start] [ literal[string] ])
keyword[if] identifier[len] ( identifier[gitems] )== literal[int] :
keyword[return] identifier[pd] . identifier[DataFrame] ()
identifier[df_keys] = identifier[list] ( identifier[list] ( identifier[gitems] . identifier[values] ())[ literal[int] ]. identifier[keys] ())
identifier[df_keys] . identifier[remove] ( literal[string] )
identifier[df_keys] . identifier[remove] ( literal[string] )
identifier[df_keys] . identifier[remove] ( literal[string] )
identifier[df_keys] . identifier[remove] ( literal[string] )
identifier[df_keys] . identifier[remove] ( literal[string] )
identifier[df_keys] . identifier[sort] ()
identifier[df_items] =[]
keyword[for] identifier[rid] , identifier[item] keyword[in] identifier[gitems] . identifier[items] ():
identifier[row_data] =[ identifier[item] [ identifier[k] ] keyword[for] identifier[k] keyword[in] identifier[df_keys] ]
keyword[for] identifier[key] , identifier[selected_columns] keyword[in] [( literal[string] , identifier[data_in_columns] ),
( literal[string] , identifier[data_out_columns] ),
( literal[string] , identifier[scoped_in_columns] ),
( literal[string] , identifier[scoped_out_columns] ),
( literal[string] , identifier[semantic_data_columns] )]:
keyword[for] identifier[column_key] keyword[in] identifier[selected_columns] :
identifier[row_data] . identifier[append] ( identifier[item] [ identifier[key] ]. identifier[get] ( identifier[column_key] , keyword[None] ))
identifier[df_items] . identifier[append] ( identifier[row_data] )
keyword[for] identifier[key] , identifier[selected_columns] keyword[in] [( literal[string] , identifier[data_in_columns] ),
( literal[string] , identifier[data_out_columns] ),
( literal[string] , identifier[scoped_in_columns] ),
( literal[string] , identifier[scoped_out_columns] ),
( literal[string] , identifier[semantic_data_columns] )]:
identifier[df_keys] . identifier[extend] ([ identifier[key] + literal[string] + identifier[s] keyword[for] identifier[s] keyword[in] identifier[selected_columns] ])
identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[df_items] , identifier[columns] = identifier[df_keys] )
identifier[df] . identifier[timestamp_call] = identifier[pd] . identifier[to_datetime] ( identifier[df] . identifier[timestamp_call] , identifier[unit] = literal[string] )
identifier[df] . identifier[timestamp_return] = identifier[pd] . identifier[to_datetime] ( identifier[df] . identifier[timestamp_return] , identifier[unit] = literal[string] )
identifier[df_timed] = identifier[df] . identifier[set_index] ( identifier[df] . identifier[timestamp_call] )
identifier[df_timed] . identifier[sort_index] ( identifier[inplace] = keyword[True] )
keyword[return] identifier[df_timed] | def log_to_DataFrame(execution_history_items, data_in_columns=[], data_out_columns=[], scoped_in_columns=[], scoped_out_columns=[], semantic_data_columns=[], throw_on_pickle_error=True):
"""
Returns all collapsed items in a table-like structure (pandas.DataFrame) with one row per executed
state and a set of properties resp. columns (e.g. state_name, outcome, run_id) for this state.
The data flow (data_in/out, scoped_data_in/out, semantic_data) is omitted from this table
representation by default, as the different states have different data in-/out-port, scoped_data-
ports and semantic_data defined. However, you can ask specific data-/scoped_data-ports and semantic
data to be exported as table column, given they are primitive-valued, by including the port / key
names in the *_selected-parameters. These table-columns will obviously only be well-defined for
states having this kind of port-name-/semantic-key and otherwise will contain a None-like value,
indicating missing data.
The available data per execution item (row in the table) can be printed using pandas.DataFrame.columns.
"""
try:
import pandas as pd # depends on [control=['try'], data=[]]
except ImportError:
raise ImportError("The Python package 'pandas' is required for log_to_DataFrame.") # depends on [control=['except'], data=[]]
(start, next_, concurrency, hierarchy, gitems) = log_to_collapsed_structure(execution_history_items, throw_on_pickle_error=throw_on_pickle_error)
gitems.pop(start['run_id'])
if len(gitems) == 0:
return pd.DataFrame() # depends on [control=['if'], data=[]]
# remove columns which are not generic over all states (basically the
# data flow stuff)
df_keys = list(list(gitems.values())[0].keys())
df_keys.remove('data_ins')
df_keys.remove('data_outs')
df_keys.remove('scoped_data_ins')
df_keys.remove('scoped_data_outs')
df_keys.remove('semantic_data')
df_keys.sort()
df_items = []
for (rid, item) in gitems.items():
row_data = [item[k] for k in df_keys]
for (key, selected_columns) in [('data_ins', data_in_columns), ('data_outs', data_out_columns), ('scoped_data_ins', scoped_in_columns), ('scoped_data_outs', scoped_out_columns), ('semantic_data', semantic_data_columns)]:
for column_key in selected_columns:
row_data.append(item[key].get(column_key, None)) # depends on [control=['for'], data=['column_key']] # depends on [control=['for'], data=[]]
df_items.append(row_data) # depends on [control=['for'], data=[]]
for (key, selected_columns) in [('data_ins', data_in_columns), ('data_outs', data_out_columns), ('scoped_data_ins', scoped_in_columns), ('scoped_data_outs', scoped_out_columns), ('semantic_data', semantic_data_columns)]:
df_keys.extend([key + '__' + s for s in selected_columns]) # depends on [control=['for'], data=[]]
df = pd.DataFrame(df_items, columns=df_keys)
# convert epoch to datetime
df.timestamp_call = pd.to_datetime(df.timestamp_call, unit='s')
df.timestamp_return = pd.to_datetime(df.timestamp_return, unit='s')
# use call timestamp as index
df_timed = df.set_index(df.timestamp_call)
df_timed.sort_index(inplace=True)
return df_timed |
def patch_namespaced_pod(self, name, namespace, body, **kwargs): # noqa: E501
"""patch_namespaced_pod # noqa: E501
partially update the specified Pod # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_pod(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Pod (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param UNKNOWN_BASE_TYPE body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1Pod
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_pod_with_http_info(name, namespace, body, **kwargs) # noqa: E501
else:
(data) = self.patch_namespaced_pod_with_http_info(name, namespace, body, **kwargs) # noqa: E501
return data | def function[patch_namespaced_pod, parameter[self, name, namespace, body]]:
constant[patch_namespaced_pod # noqa: E501
partially update the specified Pod # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_pod(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Pod (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param UNKNOWN_BASE_TYPE body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1Pod
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].patch_namespaced_pod_with_http_info, parameter[name[name], name[namespace], name[body]]]] | keyword[def] identifier[patch_namespaced_pod] ( identifier[self] , identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[patch_namespaced_pod_with_http_info] ( identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[patch_namespaced_pod_with_http_info] ( identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] )
keyword[return] identifier[data] | def patch_namespaced_pod(self, name, namespace, body, **kwargs): # noqa: E501
"patch_namespaced_pod # noqa: E501\n\n partially update the specified Pod # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.patch_namespaced_pod(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Pod (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param UNKNOWN_BASE_TYPE body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Pod\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_pod_with_http_info(name, namespace, body, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.patch_namespaced_pod_with_http_info(name, namespace, body, **kwargs) # noqa: E501
return data |
def from_yaml(value, native_datetimes=True):
"""
Deserializes the given value from YAML.
:param value: the value to deserialize
:type value: str
:param native_datetimes:
whether or not strings that look like dates/times should be
automatically cast to the native objects, or left as strings; if not
specified, defaults to ``True``
:type native_datetimes: bool
"""
if not yaml:
raise NotImplementedError('No supported YAML library available')
if native_datetimes:
loader = NativeDatesYamlLoader
else:
loader = StringedDatesYamlLoader
return yaml.load(value, Loader=loader) | def function[from_yaml, parameter[value, native_datetimes]]:
constant[
Deserializes the given value from YAML.
:param value: the value to deserialize
:type value: str
:param native_datetimes:
whether or not strings that look like dates/times should be
automatically cast to the native objects, or left as strings; if not
specified, defaults to ``True``
:type native_datetimes: bool
]
if <ast.UnaryOp object at 0x7da207f036a0> begin[:]
<ast.Raise object at 0x7da207f01ea0>
if name[native_datetimes] begin[:]
variable[loader] assign[=] name[NativeDatesYamlLoader]
return[call[name[yaml].load, parameter[name[value]]]] | keyword[def] identifier[from_yaml] ( identifier[value] , identifier[native_datetimes] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[yaml] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
keyword[if] identifier[native_datetimes] :
identifier[loader] = identifier[NativeDatesYamlLoader]
keyword[else] :
identifier[loader] = identifier[StringedDatesYamlLoader]
keyword[return] identifier[yaml] . identifier[load] ( identifier[value] , identifier[Loader] = identifier[loader] ) | def from_yaml(value, native_datetimes=True):
"""
Deserializes the given value from YAML.
:param value: the value to deserialize
:type value: str
:param native_datetimes:
whether or not strings that look like dates/times should be
automatically cast to the native objects, or left as strings; if not
specified, defaults to ``True``
:type native_datetimes: bool
"""
if not yaml:
raise NotImplementedError('No supported YAML library available') # depends on [control=['if'], data=[]]
if native_datetimes:
loader = NativeDatesYamlLoader # depends on [control=['if'], data=[]]
else:
loader = StringedDatesYamlLoader
return yaml.load(value, Loader=loader) |
def add_arrow(self, tipLoc, tail=None, arrow=arrow.default):
"""This method adds a straight arrow that points to
@var{TIPLOC}, which is a tuple of integers. @var{TAIL}
specifies the starting point of the arrow. It is either None
or a string consisting of the following letters: 'l', 'c',
'r', 't', 'm,', and 'b'. Letters 'l', 'c', or 'r' means to
start the arrow from the left, center, or right of the text
box, respectively. Letters 't', 'm', or 'b' means to start the
arrow from the top, middle or bottom of the text box. For
example, when @samp{tail = 'tc'} then arrow is drawn from
top-center point of the text box. ARROW specifies the style of
the arrow. <<arrow>>.
"""
self._arrows.append((tipLoc, tail, arrow)) | def function[add_arrow, parameter[self, tipLoc, tail, arrow]]:
constant[This method adds a straight arrow that points to
@var{TIPLOC}, which is a tuple of integers. @var{TAIL}
specifies the starting point of the arrow. It is either None
or a string consisting of the following letters: 'l', 'c',
'r', 't', 'm,', and 'b'. Letters 'l', 'c', or 'r' means to
start the arrow from the left, center, or right of the text
box, respectively. Letters 't', 'm', or 'b' means to start the
arrow from the top, middle or bottom of the text box. For
example, when @samp{tail = 'tc'} then arrow is drawn from
top-center point of the text box. ARROW specifies the style of
the arrow. <<arrow>>.
]
call[name[self]._arrows.append, parameter[tuple[[<ast.Name object at 0x7da18bccb280>, <ast.Name object at 0x7da18bcc8a00>, <ast.Name object at 0x7da18bcc9510>]]]] | keyword[def] identifier[add_arrow] ( identifier[self] , identifier[tipLoc] , identifier[tail] = keyword[None] , identifier[arrow] = identifier[arrow] . identifier[default] ):
literal[string]
identifier[self] . identifier[_arrows] . identifier[append] (( identifier[tipLoc] , identifier[tail] , identifier[arrow] )) | def add_arrow(self, tipLoc, tail=None, arrow=arrow.default):
"""This method adds a straight arrow that points to
@var{TIPLOC}, which is a tuple of integers. @var{TAIL}
specifies the starting point of the arrow. It is either None
or a string consisting of the following letters: 'l', 'c',
'r', 't', 'm,', and 'b'. Letters 'l', 'c', or 'r' means to
start the arrow from the left, center, or right of the text
box, respectively. Letters 't', 'm', or 'b' means to start the
arrow from the top, middle or bottom of the text box. For
example, when @samp{tail = 'tc'} then arrow is drawn from
top-center point of the text box. ARROW specifies the style of
the arrow. <<arrow>>.
"""
self._arrows.append((tipLoc, tail, arrow)) |
def to_feature(value):
"""Convert the given value to Feature if necessary."""
if isinstance(value, FeatureConnector):
return value
elif utils.is_dtype(value): # tf.int32, tf.string,...
return Tensor(shape=(), dtype=tf.as_dtype(value))
elif isinstance(value, dict):
return FeaturesDict(value)
else:
raise ValueError('Feature not supported: {}'.format(value)) | def function[to_feature, parameter[value]]:
constant[Convert the given value to Feature if necessary.]
if call[name[isinstance], parameter[name[value], name[FeatureConnector]]] begin[:]
return[name[value]] | keyword[def] identifier[to_feature] ( identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[FeatureConnector] ):
keyword[return] identifier[value]
keyword[elif] identifier[utils] . identifier[is_dtype] ( identifier[value] ):
keyword[return] identifier[Tensor] ( identifier[shape] =(), identifier[dtype] = identifier[tf] . identifier[as_dtype] ( identifier[value] ))
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[dict] ):
keyword[return] identifier[FeaturesDict] ( identifier[value] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[value] )) | def to_feature(value):
"""Convert the given value to Feature if necessary."""
if isinstance(value, FeatureConnector):
return value # depends on [control=['if'], data=[]]
elif utils.is_dtype(value): # tf.int32, tf.string,...
return Tensor(shape=(), dtype=tf.as_dtype(value)) # depends on [control=['if'], data=[]]
elif isinstance(value, dict):
return FeaturesDict(value) # depends on [control=['if'], data=[]]
else:
raise ValueError('Feature not supported: {}'.format(value)) |
def job_step_error(self, job_request_payload, message):
"""
Send message that the job step failed using payload data.
:param job_request_payload: StageJobPayload|RunJobPayload|StoreJobOutputPayload payload from job with error
:param message: description of the error
"""
payload = JobStepErrorPayload(job_request_payload, message)
self.send(job_request_payload.error_command, payload) | def function[job_step_error, parameter[self, job_request_payload, message]]:
constant[
Send message that the job step failed using payload data.
:param job_request_payload: StageJobPayload|RunJobPayload|StoreJobOutputPayload payload from job with error
:param message: description of the error
]
variable[payload] assign[=] call[name[JobStepErrorPayload], parameter[name[job_request_payload], name[message]]]
call[name[self].send, parameter[name[job_request_payload].error_command, name[payload]]] | keyword[def] identifier[job_step_error] ( identifier[self] , identifier[job_request_payload] , identifier[message] ):
literal[string]
identifier[payload] = identifier[JobStepErrorPayload] ( identifier[job_request_payload] , identifier[message] )
identifier[self] . identifier[send] ( identifier[job_request_payload] . identifier[error_command] , identifier[payload] ) | def job_step_error(self, job_request_payload, message):
"""
Send message that the job step failed using payload data.
:param job_request_payload: StageJobPayload|RunJobPayload|StoreJobOutputPayload payload from job with error
:param message: description of the error
"""
payload = JobStepErrorPayload(job_request_payload, message)
self.send(job_request_payload.error_command, payload) |
def kron_with_controls(*matrices: np.ndarray) -> np.ndarray:
"""Computes the kronecker product of a sequence of matrices and controls.
Use linalg.CONTROL_TAG to represent controls. Any entry of the output
matrix corresponding to a situation where the control is not satisfied will
be overwritten by identity matrix elements.
The control logic works by imbuing NaN with the meaning "failed to meet one
or more controls". The normal kronecker product then spreads the per-item
NaNs to all the entries in the product that need to be replaced by identity
matrix elements. This method rewrites those NaNs. Thus CONTROL_TAG can be
the matrix [[NaN, 0], [0, 1]] or equivalently [[NaN, NaN], [NaN, 1]].
Because this method re-interprets NaNs as control-failed elements, it won't
propagate error-indicating NaNs from its input to its output in the way
you'd otherwise expect.
Args:
*matrices: The matrices and controls to combine with the kronecker
product.
Returns:
The resulting matrix.
"""
product = kron(*matrices)
# The NaN from CONTROL_TAG spreads to everywhere identity belongs.
for i in range(product.shape[0]):
for j in range(product.shape[1]):
if np.isnan(product[i, j]):
product[i, j] = 1 if i == j else 0
return product | def function[kron_with_controls, parameter[]]:
constant[Computes the kronecker product of a sequence of matrices and controls.
Use linalg.CONTROL_TAG to represent controls. Any entry of the output
matrix corresponding to a situation where the control is not satisfied will
be overwritten by identity matrix elements.
The control logic works by imbuing NaN with the meaning "failed to meet one
or more controls". The normal kronecker product then spreads the per-item
NaNs to all the entries in the product that need to be replaced by identity
matrix elements. This method rewrites those NaNs. Thus CONTROL_TAG can be
the matrix [[NaN, 0], [0, 1]] or equivalently [[NaN, NaN], [NaN, 1]].
Because this method re-interprets NaNs as control-failed elements, it won't
propagate error-indicating NaNs from its input to its output in the way
you'd otherwise expect.
Args:
*matrices: The matrices and controls to combine with the kronecker
product.
Returns:
The resulting matrix.
]
variable[product] assign[=] call[name[kron], parameter[<ast.Starred object at 0x7da204620850>]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[product].shape][constant[0]]]]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[call[name[product].shape][constant[1]]]]] begin[:]
if call[name[np].isnan, parameter[call[name[product]][tuple[[<ast.Name object at 0x7da204622e00>, <ast.Name object at 0x7da204620eb0>]]]]] begin[:]
call[name[product]][tuple[[<ast.Name object at 0x7da204621db0>, <ast.Name object at 0x7da1b1cef1f0>]]] assign[=] <ast.IfExp object at 0x7da1b1cedff0>
return[name[product]] | keyword[def] identifier[kron_with_controls] (* identifier[matrices] : identifier[np] . identifier[ndarray] )-> identifier[np] . identifier[ndarray] :
literal[string]
identifier[product] = identifier[kron] (* identifier[matrices] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[product] . identifier[shape] [ literal[int] ]):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[product] . identifier[shape] [ literal[int] ]):
keyword[if] identifier[np] . identifier[isnan] ( identifier[product] [ identifier[i] , identifier[j] ]):
identifier[product] [ identifier[i] , identifier[j] ]= literal[int] keyword[if] identifier[i] == identifier[j] keyword[else] literal[int]
keyword[return] identifier[product] | def kron_with_controls(*matrices: np.ndarray) -> np.ndarray:
"""Computes the kronecker product of a sequence of matrices and controls.
Use linalg.CONTROL_TAG to represent controls. Any entry of the output
matrix corresponding to a situation where the control is not satisfied will
be overwritten by identity matrix elements.
The control logic works by imbuing NaN with the meaning "failed to meet one
or more controls". The normal kronecker product then spreads the per-item
NaNs to all the entries in the product that need to be replaced by identity
matrix elements. This method rewrites those NaNs. Thus CONTROL_TAG can be
the matrix [[NaN, 0], [0, 1]] or equivalently [[NaN, NaN], [NaN, 1]].
Because this method re-interprets NaNs as control-failed elements, it won't
propagate error-indicating NaNs from its input to its output in the way
you'd otherwise expect.
Args:
*matrices: The matrices and controls to combine with the kronecker
product.
Returns:
The resulting matrix.
"""
product = kron(*matrices)
# The NaN from CONTROL_TAG spreads to everywhere identity belongs.
for i in range(product.shape[0]):
for j in range(product.shape[1]):
if np.isnan(product[i, j]):
product[i, j] = 1 if i == j else 0 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
return product |
def _data_root_Linux():
"""
Use freedesktop.org Base Dir Specfication to determine storage
location.
"""
fallback = os.path.expanduser('~/.local/share')
root = os.environ.get('XDG_DATA_HOME', None) or fallback
return os.path.join(root, 'python_keyring') | def function[_data_root_Linux, parameter[]]:
constant[
Use freedesktop.org Base Dir Specfication to determine storage
location.
]
variable[fallback] assign[=] call[name[os].path.expanduser, parameter[constant[~/.local/share]]]
variable[root] assign[=] <ast.BoolOp object at 0x7da18c4cc880>
return[call[name[os].path.join, parameter[name[root], constant[python_keyring]]]] | keyword[def] identifier[_data_root_Linux] ():
literal[string]
identifier[fallback] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] )
identifier[root] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , keyword[None] ) keyword[or] identifier[fallback]
keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[root] , literal[string] ) | def _data_root_Linux():
"""
Use freedesktop.org Base Dir Specfication to determine storage
location.
"""
fallback = os.path.expanduser('~/.local/share')
root = os.environ.get('XDG_DATA_HOME', None) or fallback
return os.path.join(root, 'python_keyring') |
async def list_state(self, request):
"""Fetches list of data entries, optionally filtered by address prefix.
Request:
query:
- head: The id of the block to use as the head of the chain
- address: Return entries whose addresses begin with this
prefix
Response:
data: An array of leaf objects with address and data keys
head: The head used for this query (most recent if unspecified)
link: The link to this exact query, including head block
paging: Paging info and nav, like total resources and a next link
"""
paging_controls = self._get_paging_controls(request)
head, root = await self._head_to_root(request.url.query.get(
'head', None))
validator_query = client_state_pb2.ClientStateListRequest(
state_root=root,
address=request.url.query.get('address', None),
sorting=self._get_sorting_message(request, "default"),
paging=self._make_paging_message(paging_controls))
response = await self._query_validator(
Message.CLIENT_STATE_LIST_REQUEST,
client_state_pb2.ClientStateListResponse,
validator_query)
return self._wrap_paginated_response(
request=request,
response=response,
controls=paging_controls,
data=response.get('entries', []),
head=head) | <ast.AsyncFunctionDef object at 0x7da18ede5ae0> | keyword[async] keyword[def] identifier[list_state] ( identifier[self] , identifier[request] ):
literal[string]
identifier[paging_controls] = identifier[self] . identifier[_get_paging_controls] ( identifier[request] )
identifier[head] , identifier[root] = keyword[await] identifier[self] . identifier[_head_to_root] ( identifier[request] . identifier[url] . identifier[query] . identifier[get] (
literal[string] , keyword[None] ))
identifier[validator_query] = identifier[client_state_pb2] . identifier[ClientStateListRequest] (
identifier[state_root] = identifier[root] ,
identifier[address] = identifier[request] . identifier[url] . identifier[query] . identifier[get] ( literal[string] , keyword[None] ),
identifier[sorting] = identifier[self] . identifier[_get_sorting_message] ( identifier[request] , literal[string] ),
identifier[paging] = identifier[self] . identifier[_make_paging_message] ( identifier[paging_controls] ))
identifier[response] = keyword[await] identifier[self] . identifier[_query_validator] (
identifier[Message] . identifier[CLIENT_STATE_LIST_REQUEST] ,
identifier[client_state_pb2] . identifier[ClientStateListResponse] ,
identifier[validator_query] )
keyword[return] identifier[self] . identifier[_wrap_paginated_response] (
identifier[request] = identifier[request] ,
identifier[response] = identifier[response] ,
identifier[controls] = identifier[paging_controls] ,
identifier[data] = identifier[response] . identifier[get] ( literal[string] ,[]),
identifier[head] = identifier[head] ) | async def list_state(self, request):
"""Fetches list of data entries, optionally filtered by address prefix.
Request:
query:
- head: The id of the block to use as the head of the chain
- address: Return entries whose addresses begin with this
prefix
Response:
data: An array of leaf objects with address and data keys
head: The head used for this query (most recent if unspecified)
link: The link to this exact query, including head block
paging: Paging info and nav, like total resources and a next link
"""
paging_controls = self._get_paging_controls(request)
(head, root) = await self._head_to_root(request.url.query.get('head', None))
validator_query = client_state_pb2.ClientStateListRequest(state_root=root, address=request.url.query.get('address', None), sorting=self._get_sorting_message(request, 'default'), paging=self._make_paging_message(paging_controls))
response = await self._query_validator(Message.CLIENT_STATE_LIST_REQUEST, client_state_pb2.ClientStateListResponse, validator_query)
return self._wrap_paginated_response(request=request, response=response, controls=paging_controls, data=response.get('entries', []), head=head) |
def _set_players(self, _players):
"""
Players will always be set in seat order (1,2,3,4)
"""
self._players = list()
_players = list(_players)
_players.sort(key=lambda p: p.seat)
for p in _players:
self._players.append(p) | def function[_set_players, parameter[self, _players]]:
constant[
Players will always be set in seat order (1,2,3,4)
]
name[self]._players assign[=] call[name[list], parameter[]]
variable[_players] assign[=] call[name[list], parameter[name[_players]]]
call[name[_players].sort, parameter[]]
for taget[name[p]] in starred[name[_players]] begin[:]
call[name[self]._players.append, parameter[name[p]]] | keyword[def] identifier[_set_players] ( identifier[self] , identifier[_players] ):
literal[string]
identifier[self] . identifier[_players] = identifier[list] ()
identifier[_players] = identifier[list] ( identifier[_players] )
identifier[_players] . identifier[sort] ( identifier[key] = keyword[lambda] identifier[p] : identifier[p] . identifier[seat] )
keyword[for] identifier[p] keyword[in] identifier[_players] :
identifier[self] . identifier[_players] . identifier[append] ( identifier[p] ) | def _set_players(self, _players):
"""
Players will always be set in seat order (1,2,3,4)
"""
self._players = list()
_players = list(_players)
_players.sort(key=lambda p: p.seat)
for p in _players:
self._players.append(p) # depends on [control=['for'], data=['p']] |
def set_json(domain, action, filename=False, record=False):
"""Convert text file to JSON.
Arguments:
domain: domain name of updating target
action: True ; for PUT/POST HTTP method
False; for DELETE HTTP method
filename: text file of bulk updating (default is False)
record: json record of updating single record (default is False)
"""
o = JSONConverter(domain)
if filename:
# for 'bulk_create/bulk_delete'
with open(filename, 'r') as f:
o.separate_input_file(f)
for item in o.separated_list:
o.read_records(item.splitlines())
o.generata_data(action)
elif record:
# for 'create/delete'
o.read_records(record)
o.generata_data(action)
return o.dict_records | def function[set_json, parameter[domain, action, filename, record]]:
constant[Convert text file to JSON.
Arguments:
domain: domain name of updating target
action: True ; for PUT/POST HTTP method
False; for DELETE HTTP method
filename: text file of bulk updating (default is False)
record: json record of updating single record (default is False)
]
variable[o] assign[=] call[name[JSONConverter], parameter[name[domain]]]
if name[filename] begin[:]
with call[name[open], parameter[name[filename], constant[r]]] begin[:]
call[name[o].separate_input_file, parameter[name[f]]]
for taget[name[item]] in starred[name[o].separated_list] begin[:]
call[name[o].read_records, parameter[call[name[item].splitlines, parameter[]]]]
call[name[o].generata_data, parameter[name[action]]]
return[name[o].dict_records] | keyword[def] identifier[set_json] ( identifier[domain] , identifier[action] , identifier[filename] = keyword[False] , identifier[record] = keyword[False] ):
literal[string]
identifier[o] = identifier[JSONConverter] ( identifier[domain] )
keyword[if] identifier[filename] :
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] :
identifier[o] . identifier[separate_input_file] ( identifier[f] )
keyword[for] identifier[item] keyword[in] identifier[o] . identifier[separated_list] :
identifier[o] . identifier[read_records] ( identifier[item] . identifier[splitlines] ())
identifier[o] . identifier[generata_data] ( identifier[action] )
keyword[elif] identifier[record] :
identifier[o] . identifier[read_records] ( identifier[record] )
identifier[o] . identifier[generata_data] ( identifier[action] )
keyword[return] identifier[o] . identifier[dict_records] | def set_json(domain, action, filename=False, record=False):
"""Convert text file to JSON.
Arguments:
domain: domain name of updating target
action: True ; for PUT/POST HTTP method
False; for DELETE HTTP method
filename: text file of bulk updating (default is False)
record: json record of updating single record (default is False)
"""
o = JSONConverter(domain)
if filename:
# for 'bulk_create/bulk_delete'
with open(filename, 'r') as f:
o.separate_input_file(f)
for item in o.separated_list:
o.read_records(item.splitlines())
o.generata_data(action) # depends on [control=['for'], data=['item']] # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
elif record:
# for 'create/delete'
o.read_records(record)
o.generata_data(action) # depends on [control=['if'], data=[]]
return o.dict_records |
def _filter_binding(self, binding):
"""
Filter binding from ISBN record. In MARC XML / OAI, the binding
information is stored in same subrecord as ISBN.
Example:
``<subfield code="a">80-251-0225-4 (brož.) :</subfield>`` ->
``brož.``.
"""
binding = binding.strip().split(" ", 1)[-1] # isolate bind. from ISBN
binding = remove_hairs_fn(binding) # remove special chars from binding
return binding.split(":")[-1].strip() | def function[_filter_binding, parameter[self, binding]]:
constant[
Filter binding from ISBN record. In MARC XML / OAI, the binding
information is stored in same subrecord as ISBN.
Example:
``<subfield code="a">80-251-0225-4 (brož.) :</subfield>`` ->
``brož.``.
]
variable[binding] assign[=] call[call[call[name[binding].strip, parameter[]].split, parameter[constant[ ], constant[1]]]][<ast.UnaryOp object at 0x7da1afefa680>]
variable[binding] assign[=] call[name[remove_hairs_fn], parameter[name[binding]]]
return[call[call[call[name[binding].split, parameter[constant[:]]]][<ast.UnaryOp object at 0x7da1afefa980>].strip, parameter[]]] | keyword[def] identifier[_filter_binding] ( identifier[self] , identifier[binding] ):
literal[string]
identifier[binding] = identifier[binding] . identifier[strip] (). identifier[split] ( literal[string] , literal[int] )[- literal[int] ]
identifier[binding] = identifier[remove_hairs_fn] ( identifier[binding] )
keyword[return] identifier[binding] . identifier[split] ( literal[string] )[- literal[int] ]. identifier[strip] () | def _filter_binding(self, binding):
"""
Filter binding from ISBN record. In MARC XML / OAI, the binding
information is stored in same subrecord as ISBN.
Example:
``<subfield code="a">80-251-0225-4 (brož.) :</subfield>`` ->
``brož.``.
"""
binding = binding.strip().split(' ', 1)[-1] # isolate bind. from ISBN
binding = remove_hairs_fn(binding) # remove special chars from binding
return binding.split(':')[-1].strip() |
def parse_html_urls(file_name, html_data):
'''
Returns a list of tuples in the form (url, file_name, line_number)
'''
try:
html = lxml.html.fromstring(html_data)
anchor_tags = html.cssselect('a')
for a in anchor_tags:
# A link was started but not finished, href with nothing set!
if not 'href' in a.attrib or a.attrib['href'] == '':
BROKEN_URLS.append(('None', file_name, a.sourceline))
url = clean_url(a.attrib['href'])
if is_valid_url(url):
if url not in URL_CACHE:
URL_CACHE.add(url)
yield (url, file_name, a.sourceline)
except SyntaxError:
pass | def function[parse_html_urls, parameter[file_name, html_data]]:
constant[
Returns a list of tuples in the form (url, file_name, line_number)
]
<ast.Try object at 0x7da1b14db8e0> | keyword[def] identifier[parse_html_urls] ( identifier[file_name] , identifier[html_data] ):
literal[string]
keyword[try] :
identifier[html] = identifier[lxml] . identifier[html] . identifier[fromstring] ( identifier[html_data] )
identifier[anchor_tags] = identifier[html] . identifier[cssselect] ( literal[string] )
keyword[for] identifier[a] keyword[in] identifier[anchor_tags] :
keyword[if] keyword[not] literal[string] keyword[in] identifier[a] . identifier[attrib] keyword[or] identifier[a] . identifier[attrib] [ literal[string] ]== literal[string] :
identifier[BROKEN_URLS] . identifier[append] (( literal[string] , identifier[file_name] , identifier[a] . identifier[sourceline] ))
identifier[url] = identifier[clean_url] ( identifier[a] . identifier[attrib] [ literal[string] ])
keyword[if] identifier[is_valid_url] ( identifier[url] ):
keyword[if] identifier[url] keyword[not] keyword[in] identifier[URL_CACHE] :
identifier[URL_CACHE] . identifier[add] ( identifier[url] )
keyword[yield] ( identifier[url] , identifier[file_name] , identifier[a] . identifier[sourceline] )
keyword[except] identifier[SyntaxError] :
keyword[pass] | def parse_html_urls(file_name, html_data):
"""
Returns a list of tuples in the form (url, file_name, line_number)
"""
try:
html = lxml.html.fromstring(html_data)
anchor_tags = html.cssselect('a')
for a in anchor_tags:
# A link was started but not finished, href with nothing set!
if not 'href' in a.attrib or a.attrib['href'] == '':
BROKEN_URLS.append(('None', file_name, a.sourceline)) # depends on [control=['if'], data=[]]
url = clean_url(a.attrib['href'])
if is_valid_url(url):
if url not in URL_CACHE:
URL_CACHE.add(url)
yield (url, file_name, a.sourceline) # depends on [control=['if'], data=['url', 'URL_CACHE']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']] # depends on [control=['try'], data=[]]
except SyntaxError:
pass # depends on [control=['except'], data=[]] |
def positions(weights, period, freq=None):
"""
Builds net position values time series, the portfolio percentage invested
in each position.
Parameters
----------
weights: pd.Series
pd.Series containing factor weights, the index contains timestamps at
which the trades are computed and the values correspond to assets
weights
- see factor_weights for more details
period: pandas.Timedelta or string
Assets holding period (1 day, 2 mins, 3 hours etc). It can be a
Timedelta or a string in the format accepted by Timedelta constructor
('1 days', '1D', '30m', '3h', '1D1h', etc)
freq : pandas DateOffset, optional
Used to specify a particular trading calendar. If not present
weights.index.freq will be used
Returns
-------
pd.DataFrame
Assets positions series, datetime on index, assets on columns.
Example:
index 'AAPL' 'MSFT' cash
2004-01-09 10:30:00 13939.3800 -14012.9930 711.5585
2004-01-09 15:30:00 0.00 -16012.9930 411.5585
2004-01-12 10:30:00 14492.6300 -14624.8700 0.0
2004-01-12 15:30:00 14874.5400 -15841.2500 0.0
2004-01-13 10:30:00 -13853.2800 13653.6400 -43.6375
"""
weights = weights.unstack()
if not isinstance(period, pd.Timedelta):
period = pd.Timedelta(period)
if freq is None:
freq = weights.index.freq
if freq is None:
freq = BDay()
warnings.warn("'freq' not set, using business day calendar",
UserWarning)
#
# weights index contains factor computation timestamps, then add returns
# timestamps too (factor timestamps + period) and save them to 'full_idx'
# 'full_idx' index will contain an entry for each point in time the weights
# change and hence they have to be re-computed
#
trades_idx = weights.index.copy()
returns_idx = utils.add_custom_calendar_timedelta(trades_idx, period, freq)
weights_idx = trades_idx.union(returns_idx)
#
# Compute portfolio weights for each point in time contained in the index
#
portfolio_weights = pd.DataFrame(index=weights_idx,
columns=weights.columns)
active_weights = []
for curr_time in weights_idx:
#
# fetch new weights that become available at curr_time and store them
# in active weights
#
if curr_time in weights.index:
assets_weights = weights.loc[curr_time]
expire_ts = utils.add_custom_calendar_timedelta(curr_time,
period, freq)
active_weights.append((expire_ts, assets_weights))
#
# remove expired entry in active_weights (older than 'period')
#
if active_weights:
expire_ts, assets_weights = active_weights[0]
if expire_ts <= curr_time:
active_weights.pop(0)
if not active_weights:
continue
#
# Compute total weights for curr_time and store them
#
tot_weights = [w for (ts, w) in active_weights]
tot_weights = pd.concat(tot_weights, axis=1)
tot_weights = tot_weights.sum(axis=1)
tot_weights /= tot_weights.abs().sum()
portfolio_weights.loc[curr_time] = tot_weights
return portfolio_weights.fillna(0) | def function[positions, parameter[weights, period, freq]]:
constant[
Builds net position values time series, the portfolio percentage invested
in each position.
Parameters
----------
weights: pd.Series
pd.Series containing factor weights, the index contains timestamps at
which the trades are computed and the values correspond to assets
weights
- see factor_weights for more details
period: pandas.Timedelta or string
Assets holding period (1 day, 2 mins, 3 hours etc). It can be a
Timedelta or a string in the format accepted by Timedelta constructor
('1 days', '1D', '30m', '3h', '1D1h', etc)
freq : pandas DateOffset, optional
Used to specify a particular trading calendar. If not present
weights.index.freq will be used
Returns
-------
pd.DataFrame
Assets positions series, datetime on index, assets on columns.
Example:
index 'AAPL' 'MSFT' cash
2004-01-09 10:30:00 13939.3800 -14012.9930 711.5585
2004-01-09 15:30:00 0.00 -16012.9930 411.5585
2004-01-12 10:30:00 14492.6300 -14624.8700 0.0
2004-01-12 15:30:00 14874.5400 -15841.2500 0.0
2004-01-13 10:30:00 -13853.2800 13653.6400 -43.6375
]
variable[weights] assign[=] call[name[weights].unstack, parameter[]]
if <ast.UnaryOp object at 0x7da18bcc9f00> begin[:]
variable[period] assign[=] call[name[pd].Timedelta, parameter[name[period]]]
if compare[name[freq] is constant[None]] begin[:]
variable[freq] assign[=] name[weights].index.freq
if compare[name[freq] is constant[None]] begin[:]
variable[freq] assign[=] call[name[BDay], parameter[]]
call[name[warnings].warn, parameter[constant['freq' not set, using business day calendar], name[UserWarning]]]
variable[trades_idx] assign[=] call[name[weights].index.copy, parameter[]]
variable[returns_idx] assign[=] call[name[utils].add_custom_calendar_timedelta, parameter[name[trades_idx], name[period], name[freq]]]
variable[weights_idx] assign[=] call[name[trades_idx].union, parameter[name[returns_idx]]]
variable[portfolio_weights] assign[=] call[name[pd].DataFrame, parameter[]]
variable[active_weights] assign[=] list[[]]
for taget[name[curr_time]] in starred[name[weights_idx]] begin[:]
if compare[name[curr_time] in name[weights].index] begin[:]
variable[assets_weights] assign[=] call[name[weights].loc][name[curr_time]]
variable[expire_ts] assign[=] call[name[utils].add_custom_calendar_timedelta, parameter[name[curr_time], name[period], name[freq]]]
call[name[active_weights].append, parameter[tuple[[<ast.Name object at 0x7da18bcc8250>, <ast.Name object at 0x7da18bcc8760>]]]]
if name[active_weights] begin[:]
<ast.Tuple object at 0x7da18bcc9fc0> assign[=] call[name[active_weights]][constant[0]]
if compare[name[expire_ts] less_or_equal[<=] name[curr_time]] begin[:]
call[name[active_weights].pop, parameter[constant[0]]]
if <ast.UnaryOp object at 0x7da18bcc9a20> begin[:]
continue
variable[tot_weights] assign[=] <ast.ListComp object at 0x7da20e956b30>
variable[tot_weights] assign[=] call[name[pd].concat, parameter[name[tot_weights]]]
variable[tot_weights] assign[=] call[name[tot_weights].sum, parameter[]]
<ast.AugAssign object at 0x7da20e957370>
call[name[portfolio_weights].loc][name[curr_time]] assign[=] name[tot_weights]
return[call[name[portfolio_weights].fillna, parameter[constant[0]]]] | keyword[def] identifier[positions] ( identifier[weights] , identifier[period] , identifier[freq] = keyword[None] ):
literal[string]
identifier[weights] = identifier[weights] . identifier[unstack] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[period] , identifier[pd] . identifier[Timedelta] ):
identifier[period] = identifier[pd] . identifier[Timedelta] ( identifier[period] )
keyword[if] identifier[freq] keyword[is] keyword[None] :
identifier[freq] = identifier[weights] . identifier[index] . identifier[freq]
keyword[if] identifier[freq] keyword[is] keyword[None] :
identifier[freq] = identifier[BDay] ()
identifier[warnings] . identifier[warn] ( literal[string] ,
identifier[UserWarning] )
identifier[trades_idx] = identifier[weights] . identifier[index] . identifier[copy] ()
identifier[returns_idx] = identifier[utils] . identifier[add_custom_calendar_timedelta] ( identifier[trades_idx] , identifier[period] , identifier[freq] )
identifier[weights_idx] = identifier[trades_idx] . identifier[union] ( identifier[returns_idx] )
identifier[portfolio_weights] = identifier[pd] . identifier[DataFrame] ( identifier[index] = identifier[weights_idx] ,
identifier[columns] = identifier[weights] . identifier[columns] )
identifier[active_weights] =[]
keyword[for] identifier[curr_time] keyword[in] identifier[weights_idx] :
keyword[if] identifier[curr_time] keyword[in] identifier[weights] . identifier[index] :
identifier[assets_weights] = identifier[weights] . identifier[loc] [ identifier[curr_time] ]
identifier[expire_ts] = identifier[utils] . identifier[add_custom_calendar_timedelta] ( identifier[curr_time] ,
identifier[period] , identifier[freq] )
identifier[active_weights] . identifier[append] (( identifier[expire_ts] , identifier[assets_weights] ))
keyword[if] identifier[active_weights] :
identifier[expire_ts] , identifier[assets_weights] = identifier[active_weights] [ literal[int] ]
keyword[if] identifier[expire_ts] <= identifier[curr_time] :
identifier[active_weights] . identifier[pop] ( literal[int] )
keyword[if] keyword[not] identifier[active_weights] :
keyword[continue]
identifier[tot_weights] =[ identifier[w] keyword[for] ( identifier[ts] , identifier[w] ) keyword[in] identifier[active_weights] ]
identifier[tot_weights] = identifier[pd] . identifier[concat] ( identifier[tot_weights] , identifier[axis] = literal[int] )
identifier[tot_weights] = identifier[tot_weights] . identifier[sum] ( identifier[axis] = literal[int] )
identifier[tot_weights] /= identifier[tot_weights] . identifier[abs] (). identifier[sum] ()
identifier[portfolio_weights] . identifier[loc] [ identifier[curr_time] ]= identifier[tot_weights]
keyword[return] identifier[portfolio_weights] . identifier[fillna] ( literal[int] ) | def positions(weights, period, freq=None):
"""
Builds net position values time series, the portfolio percentage invested
in each position.
Parameters
----------
weights: pd.Series
pd.Series containing factor weights, the index contains timestamps at
which the trades are computed and the values correspond to assets
weights
- see factor_weights for more details
period: pandas.Timedelta or string
Assets holding period (1 day, 2 mins, 3 hours etc). It can be a
Timedelta or a string in the format accepted by Timedelta constructor
('1 days', '1D', '30m', '3h', '1D1h', etc)
freq : pandas DateOffset, optional
Used to specify a particular trading calendar. If not present
weights.index.freq will be used
Returns
-------
pd.DataFrame
Assets positions series, datetime on index, assets on columns.
Example:
index 'AAPL' 'MSFT' cash
2004-01-09 10:30:00 13939.3800 -14012.9930 711.5585
2004-01-09 15:30:00 0.00 -16012.9930 411.5585
2004-01-12 10:30:00 14492.6300 -14624.8700 0.0
2004-01-12 15:30:00 14874.5400 -15841.2500 0.0
2004-01-13 10:30:00 -13853.2800 13653.6400 -43.6375
"""
weights = weights.unstack()
if not isinstance(period, pd.Timedelta):
period = pd.Timedelta(period) # depends on [control=['if'], data=[]]
if freq is None:
freq = weights.index.freq # depends on [control=['if'], data=['freq']]
if freq is None:
freq = BDay()
warnings.warn("'freq' not set, using business day calendar", UserWarning) # depends on [control=['if'], data=['freq']]
#
# weights index contains factor computation timestamps, then add returns
# timestamps too (factor timestamps + period) and save them to 'full_idx'
# 'full_idx' index will contain an entry for each point in time the weights
# change and hence they have to be re-computed
#
trades_idx = weights.index.copy()
returns_idx = utils.add_custom_calendar_timedelta(trades_idx, period, freq)
weights_idx = trades_idx.union(returns_idx)
#
# Compute portfolio weights for each point in time contained in the index
#
portfolio_weights = pd.DataFrame(index=weights_idx, columns=weights.columns)
active_weights = []
for curr_time in weights_idx:
#
# fetch new weights that become available at curr_time and store them
# in active weights
#
if curr_time in weights.index:
assets_weights = weights.loc[curr_time]
expire_ts = utils.add_custom_calendar_timedelta(curr_time, period, freq)
active_weights.append((expire_ts, assets_weights)) # depends on [control=['if'], data=['curr_time']]
#
# remove expired entry in active_weights (older than 'period')
#
if active_weights:
(expire_ts, assets_weights) = active_weights[0]
if expire_ts <= curr_time:
active_weights.pop(0) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not active_weights:
continue # depends on [control=['if'], data=[]]
#
# Compute total weights for curr_time and store them
#
tot_weights = [w for (ts, w) in active_weights]
tot_weights = pd.concat(tot_weights, axis=1)
tot_weights = tot_weights.sum(axis=1)
tot_weights /= tot_weights.abs().sum()
portfolio_weights.loc[curr_time] = tot_weights # depends on [control=['for'], data=['curr_time']]
return portfolio_weights.fillna(0) |
def _load_json_file(self, file, decoder=None):
"""
Load data from json file
:param file: Readable file or path to file
:type file: FileIO | str | unicode
:param decoder: Use custom json decoder
:type decoder: T <= flotils.loadable.DateTimeDecoder
:return: Json data
:rtype: None | int | float | str | list | dict
:raises IOError: Failed to load
"""
try:
res = load_json_file(file, decoder=decoder)
except ValueError as e:
if "{}".format(e) == "No JSON object could be decoded":
raise IOError("Decoding JSON failed")
self.exception("Failed to load from {}".format(file))
raise IOError("Loading file failed")
except:
self.exception("Failed to load from {}".format(file))
raise IOError("Loading file failed")
return res | def function[_load_json_file, parameter[self, file, decoder]]:
constant[
Load data from json file
:param file: Readable file or path to file
:type file: FileIO | str | unicode
:param decoder: Use custom json decoder
:type decoder: T <= flotils.loadable.DateTimeDecoder
:return: Json data
:rtype: None | int | float | str | list | dict
:raises IOError: Failed to load
]
<ast.Try object at 0x7da1b10ce590>
return[name[res]] | keyword[def] identifier[_load_json_file] ( identifier[self] , identifier[file] , identifier[decoder] = keyword[None] ):
literal[string]
keyword[try] :
identifier[res] = identifier[load_json_file] ( identifier[file] , identifier[decoder] = identifier[decoder] )
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
keyword[if] literal[string] . identifier[format] ( identifier[e] )== literal[string] :
keyword[raise] identifier[IOError] ( literal[string] )
identifier[self] . identifier[exception] ( literal[string] . identifier[format] ( identifier[file] ))
keyword[raise] identifier[IOError] ( literal[string] )
keyword[except] :
identifier[self] . identifier[exception] ( literal[string] . identifier[format] ( identifier[file] ))
keyword[raise] identifier[IOError] ( literal[string] )
keyword[return] identifier[res] | def _load_json_file(self, file, decoder=None):
"""
Load data from json file
:param file: Readable file or path to file
:type file: FileIO | str | unicode
:param decoder: Use custom json decoder
:type decoder: T <= flotils.loadable.DateTimeDecoder
:return: Json data
:rtype: None | int | float | str | list | dict
:raises IOError: Failed to load
"""
try:
res = load_json_file(file, decoder=decoder) # depends on [control=['try'], data=[]]
except ValueError as e:
if '{}'.format(e) == 'No JSON object could be decoded':
raise IOError('Decoding JSON failed') # depends on [control=['if'], data=[]]
self.exception('Failed to load from {}'.format(file))
raise IOError('Loading file failed') # depends on [control=['except'], data=['e']]
except:
self.exception('Failed to load from {}'.format(file))
raise IOError('Loading file failed') # depends on [control=['except'], data=[]]
return res |
def set(self, section, option, value=None):
"""
Extends :meth:`~configparser.ConfigParser.set` by auto formatting byte strings into unicode strings.
"""
if isinstance(section, bytes):
section = section.decode('utf8')
if isinstance(option, bytes):
option = option.decode('utf8')
if isinstance(value, bytes):
value = value.decode('utf8')
return super(VSGConfigParser, self).set(section, option, value) | def function[set, parameter[self, section, option, value]]:
constant[
Extends :meth:`~configparser.ConfigParser.set` by auto formatting byte strings into unicode strings.
]
if call[name[isinstance], parameter[name[section], name[bytes]]] begin[:]
variable[section] assign[=] call[name[section].decode, parameter[constant[utf8]]]
if call[name[isinstance], parameter[name[option], name[bytes]]] begin[:]
variable[option] assign[=] call[name[option].decode, parameter[constant[utf8]]]
if call[name[isinstance], parameter[name[value], name[bytes]]] begin[:]
variable[value] assign[=] call[name[value].decode, parameter[constant[utf8]]]
return[call[call[name[super], parameter[name[VSGConfigParser], name[self]]].set, parameter[name[section], name[option], name[value]]]] | keyword[def] identifier[set] ( identifier[self] , identifier[section] , identifier[option] , identifier[value] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[section] , identifier[bytes] ):
identifier[section] = identifier[section] . identifier[decode] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[option] , identifier[bytes] ):
identifier[option] = identifier[option] . identifier[decode] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[bytes] ):
identifier[value] = identifier[value] . identifier[decode] ( literal[string] )
keyword[return] identifier[super] ( identifier[VSGConfigParser] , identifier[self] ). identifier[set] ( identifier[section] , identifier[option] , identifier[value] ) | def set(self, section, option, value=None):
"""
Extends :meth:`~configparser.ConfigParser.set` by auto formatting byte strings into unicode strings.
"""
if isinstance(section, bytes):
section = section.decode('utf8') # depends on [control=['if'], data=[]]
if isinstance(option, bytes):
option = option.decode('utf8') # depends on [control=['if'], data=[]]
if isinstance(value, bytes):
value = value.decode('utf8') # depends on [control=['if'], data=[]]
return super(VSGConfigParser, self).set(section, option, value) |
def output(self, name='',dest=''):
"Output PDF to some destination"
#Finish document if necessary
if(self.state<3):
self.close()
dest=dest.upper()
if(dest==''):
if(name==''):
name='doc.pdf'
dest='I'
else:
dest='F'
if dest=='I':
print(self.buffer)
elif dest=='D':
print(self.buffer)
elif dest=='F':
#Save to local file
f=open(name,'wb')
if(not f):
self.error('Unable to create output file: '+name)
if PY3K:
# manage binary data as latin1 until PEP461 or similar is implemented
f.write(self.buffer.encode("latin1"))
else:
f.write(self.buffer)
f.close()
elif dest=='S':
#Return as a string
return self.buffer
else:
self.error('Incorrect output destination: '+dest)
return '' | def function[output, parameter[self, name, dest]]:
constant[Output PDF to some destination]
if compare[name[self].state less[<] constant[3]] begin[:]
call[name[self].close, parameter[]]
variable[dest] assign[=] call[name[dest].upper, parameter[]]
if compare[name[dest] equal[==] constant[]] begin[:]
if compare[name[name] equal[==] constant[]] begin[:]
variable[name] assign[=] constant[doc.pdf]
variable[dest] assign[=] constant[I]
if compare[name[dest] equal[==] constant[I]] begin[:]
call[name[print], parameter[name[self].buffer]]
return[constant[]] | keyword[def] identifier[output] ( identifier[self] , identifier[name] = literal[string] , identifier[dest] = literal[string] ):
literal[string]
keyword[if] ( identifier[self] . identifier[state] < literal[int] ):
identifier[self] . identifier[close] ()
identifier[dest] = identifier[dest] . identifier[upper] ()
keyword[if] ( identifier[dest] == literal[string] ):
keyword[if] ( identifier[name] == literal[string] ):
identifier[name] = literal[string]
identifier[dest] = literal[string]
keyword[else] :
identifier[dest] = literal[string]
keyword[if] identifier[dest] == literal[string] :
identifier[print] ( identifier[self] . identifier[buffer] )
keyword[elif] identifier[dest] == literal[string] :
identifier[print] ( identifier[self] . identifier[buffer] )
keyword[elif] identifier[dest] == literal[string] :
identifier[f] = identifier[open] ( identifier[name] , literal[string] )
keyword[if] ( keyword[not] identifier[f] ):
identifier[self] . identifier[error] ( literal[string] + identifier[name] )
keyword[if] identifier[PY3K] :
identifier[f] . identifier[write] ( identifier[self] . identifier[buffer] . identifier[encode] ( literal[string] ))
keyword[else] :
identifier[f] . identifier[write] ( identifier[self] . identifier[buffer] )
identifier[f] . identifier[close] ()
keyword[elif] identifier[dest] == literal[string] :
keyword[return] identifier[self] . identifier[buffer]
keyword[else] :
identifier[self] . identifier[error] ( literal[string] + identifier[dest] )
keyword[return] literal[string] | def output(self, name='', dest=''):
"""Output PDF to some destination"""
#Finish document if necessary
if self.state < 3:
self.close() # depends on [control=['if'], data=[]]
dest = dest.upper()
if dest == '':
if name == '':
name = 'doc.pdf'
dest = 'I' # depends on [control=['if'], data=['name']]
else:
dest = 'F' # depends on [control=['if'], data=['dest']]
if dest == 'I':
print(self.buffer) # depends on [control=['if'], data=[]]
elif dest == 'D':
print(self.buffer) # depends on [control=['if'], data=[]]
elif dest == 'F':
#Save to local file
f = open(name, 'wb')
if not f:
self.error('Unable to create output file: ' + name) # depends on [control=['if'], data=[]]
if PY3K:
# manage binary data as latin1 until PEP461 or similar is implemented
f.write(self.buffer.encode('latin1')) # depends on [control=['if'], data=[]]
else:
f.write(self.buffer)
f.close() # depends on [control=['if'], data=[]]
elif dest == 'S':
#Return as a string
return self.buffer # depends on [control=['if'], data=[]]
else:
self.error('Incorrect output destination: ' + dest)
return '' |
def fitSphere(coords):
"""
Fits a sphere to a set of points.
Extra info is stored in ``actor.info['radius']``, ``actor.info['center']``, ``actor.info['residue']``.
.. hint:: Example: |fitspheres1.py|_
|fitspheres2| |fitspheres2.py|_
"""
coords = np.array(coords)
n = len(coords)
A = np.zeros((n, 4))
A[:, :-1] = coords * 2
A[:, 3] = 1
f = np.zeros((n, 1))
x = coords[:, 0]
y = coords[:, 1]
z = coords[:, 2]
f[:, 0] = x * x + y * y + z * z
C, residue, rank, sv = np.linalg.lstsq(A, f) # solve AC=f
if rank < 4:
return None
t = (C[0] * C[0]) + (C[1] * C[1]) + (C[2] * C[2]) + C[3]
radius = np.sqrt(t)[0]
center = np.array([C[0][0], C[1][0], C[2][0]])
if len(residue):
residue = np.sqrt(residue[0]) / n
else:
residue = 0
s = vs.Sphere(center, radius, c="r", alpha=1).wire(1)
s.info["radius"] = radius
s.info["center"] = center
s.info["residue"] = residue
return s | def function[fitSphere, parameter[coords]]:
constant[
Fits a sphere to a set of points.
Extra info is stored in ``actor.info['radius']``, ``actor.info['center']``, ``actor.info['residue']``.
.. hint:: Example: |fitspheres1.py|_
|fitspheres2| |fitspheres2.py|_
]
variable[coords] assign[=] call[name[np].array, parameter[name[coords]]]
variable[n] assign[=] call[name[len], parameter[name[coords]]]
variable[A] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da204622ce0>, <ast.Constant object at 0x7da2046239d0>]]]]
call[name[A]][tuple[[<ast.Slice object at 0x7da2046216c0>, <ast.Slice object at 0x7da204621420>]]] assign[=] binary_operation[name[coords] * constant[2]]
call[name[A]][tuple[[<ast.Slice object at 0x7da204623010>, <ast.Constant object at 0x7da204622ef0>]]] assign[=] constant[1]
variable[f] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da204621870>, <ast.Constant object at 0x7da2046204f0>]]]]
variable[x] assign[=] call[name[coords]][tuple[[<ast.Slice object at 0x7da204621ff0>, <ast.Constant object at 0x7da204621cc0>]]]
variable[y] assign[=] call[name[coords]][tuple[[<ast.Slice object at 0x7da204621fc0>, <ast.Constant object at 0x7da2046205e0>]]]
variable[z] assign[=] call[name[coords]][tuple[[<ast.Slice object at 0x7da2046221d0>, <ast.Constant object at 0x7da204623610>]]]
call[name[f]][tuple[[<ast.Slice object at 0x7da204621c90>, <ast.Constant object at 0x7da204622080>]]] assign[=] binary_operation[binary_operation[binary_operation[name[x] * name[x]] + binary_operation[name[y] * name[y]]] + binary_operation[name[z] * name[z]]]
<ast.Tuple object at 0x7da204620b80> assign[=] call[name[np].linalg.lstsq, parameter[name[A], name[f]]]
if compare[name[rank] less[<] constant[4]] begin[:]
return[constant[None]]
variable[t] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[call[name[C]][constant[0]] * call[name[C]][constant[0]]] + binary_operation[call[name[C]][constant[1]] * call[name[C]][constant[1]]]] + binary_operation[call[name[C]][constant[2]] * call[name[C]][constant[2]]]] + call[name[C]][constant[3]]]
variable[radius] assign[=] call[call[name[np].sqrt, parameter[name[t]]]][constant[0]]
variable[center] assign[=] call[name[np].array, parameter[list[[<ast.Subscript object at 0x7da204621150>, <ast.Subscript object at 0x7da204623550>, <ast.Subscript object at 0x7da204622860>]]]]
if call[name[len], parameter[name[residue]]] begin[:]
variable[residue] assign[=] binary_operation[call[name[np].sqrt, parameter[call[name[residue]][constant[0]]]] / name[n]]
variable[s] assign[=] call[call[name[vs].Sphere, parameter[name[center], name[radius]]].wire, parameter[constant[1]]]
call[name[s].info][constant[radius]] assign[=] name[radius]
call[name[s].info][constant[center]] assign[=] name[center]
call[name[s].info][constant[residue]] assign[=] name[residue]
return[name[s]] | keyword[def] identifier[fitSphere] ( identifier[coords] ):
literal[string]
identifier[coords] = identifier[np] . identifier[array] ( identifier[coords] )
identifier[n] = identifier[len] ( identifier[coords] )
identifier[A] = identifier[np] . identifier[zeros] (( identifier[n] , literal[int] ))
identifier[A] [:,:- literal[int] ]= identifier[coords] * literal[int]
identifier[A] [:, literal[int] ]= literal[int]
identifier[f] = identifier[np] . identifier[zeros] (( identifier[n] , literal[int] ))
identifier[x] = identifier[coords] [:, literal[int] ]
identifier[y] = identifier[coords] [:, literal[int] ]
identifier[z] = identifier[coords] [:, literal[int] ]
identifier[f] [:, literal[int] ]= identifier[x] * identifier[x] + identifier[y] * identifier[y] + identifier[z] * identifier[z]
identifier[C] , identifier[residue] , identifier[rank] , identifier[sv] = identifier[np] . identifier[linalg] . identifier[lstsq] ( identifier[A] , identifier[f] )
keyword[if] identifier[rank] < literal[int] :
keyword[return] keyword[None]
identifier[t] =( identifier[C] [ literal[int] ]* identifier[C] [ literal[int] ])+( identifier[C] [ literal[int] ]* identifier[C] [ literal[int] ])+( identifier[C] [ literal[int] ]* identifier[C] [ literal[int] ])+ identifier[C] [ literal[int] ]
identifier[radius] = identifier[np] . identifier[sqrt] ( identifier[t] )[ literal[int] ]
identifier[center] = identifier[np] . identifier[array] ([ identifier[C] [ literal[int] ][ literal[int] ], identifier[C] [ literal[int] ][ literal[int] ], identifier[C] [ literal[int] ][ literal[int] ]])
keyword[if] identifier[len] ( identifier[residue] ):
identifier[residue] = identifier[np] . identifier[sqrt] ( identifier[residue] [ literal[int] ])/ identifier[n]
keyword[else] :
identifier[residue] = literal[int]
identifier[s] = identifier[vs] . identifier[Sphere] ( identifier[center] , identifier[radius] , identifier[c] = literal[string] , identifier[alpha] = literal[int] ). identifier[wire] ( literal[int] )
identifier[s] . identifier[info] [ literal[string] ]= identifier[radius]
identifier[s] . identifier[info] [ literal[string] ]= identifier[center]
identifier[s] . identifier[info] [ literal[string] ]= identifier[residue]
keyword[return] identifier[s] | def fitSphere(coords):
"""
Fits a sphere to a set of points.
Extra info is stored in ``actor.info['radius']``, ``actor.info['center']``, ``actor.info['residue']``.
.. hint:: Example: |fitspheres1.py|_
|fitspheres2| |fitspheres2.py|_
"""
coords = np.array(coords)
n = len(coords)
A = np.zeros((n, 4))
A[:, :-1] = coords * 2
A[:, 3] = 1
f = np.zeros((n, 1))
x = coords[:, 0]
y = coords[:, 1]
z = coords[:, 2]
f[:, 0] = x * x + y * y + z * z
(C, residue, rank, sv) = np.linalg.lstsq(A, f) # solve AC=f
if rank < 4:
return None # depends on [control=['if'], data=[]]
t = C[0] * C[0] + C[1] * C[1] + C[2] * C[2] + C[3]
radius = np.sqrt(t)[0]
center = np.array([C[0][0], C[1][0], C[2][0]])
if len(residue):
residue = np.sqrt(residue[0]) / n # depends on [control=['if'], data=[]]
else:
residue = 0
s = vs.Sphere(center, radius, c='r', alpha=1).wire(1)
s.info['radius'] = radius
s.info['center'] = center
s.info['residue'] = residue
return s |
def development_verify():
"""Populate template and compare to ``DEVELOPMENT.rst``
Raises:
ValueError: If the current ``DEVELOPMENT.rst`` doesn't
agree with the expected value computed from the template.
"""
with open(DEVELOPMENT_TEMPLATE, "r") as file_obj:
template = file_obj.read()
expected = template.format(revision=REVISION, rtd_version=RTD_VERSION)
with open(DEVELOPMENT_FILE, "r") as file_obj:
contents = file_obj.read()
if contents != expected:
err_msg = "\n" + get_diff(
contents,
expected,
"DEVELOPMENT.rst.actual",
"DEVELOPMENT.rst.expected",
)
raise ValueError(err_msg)
else:
print("DEVELOPMENT.rst contents are as expected.") | def function[development_verify, parameter[]]:
constant[Populate template and compare to ``DEVELOPMENT.rst``
Raises:
ValueError: If the current ``DEVELOPMENT.rst`` doesn't
agree with the expected value computed from the template.
]
with call[name[open], parameter[name[DEVELOPMENT_TEMPLATE], constant[r]]] begin[:]
variable[template] assign[=] call[name[file_obj].read, parameter[]]
variable[expected] assign[=] call[name[template].format, parameter[]]
with call[name[open], parameter[name[DEVELOPMENT_FILE], constant[r]]] begin[:]
variable[contents] assign[=] call[name[file_obj].read, parameter[]]
if compare[name[contents] not_equal[!=] name[expected]] begin[:]
variable[err_msg] assign[=] binary_operation[constant[
] + call[name[get_diff], parameter[name[contents], name[expected], constant[DEVELOPMENT.rst.actual], constant[DEVELOPMENT.rst.expected]]]]
<ast.Raise object at 0x7da20e961270> | keyword[def] identifier[development_verify] ():
literal[string]
keyword[with] identifier[open] ( identifier[DEVELOPMENT_TEMPLATE] , literal[string] ) keyword[as] identifier[file_obj] :
identifier[template] = identifier[file_obj] . identifier[read] ()
identifier[expected] = identifier[template] . identifier[format] ( identifier[revision] = identifier[REVISION] , identifier[rtd_version] = identifier[RTD_VERSION] )
keyword[with] identifier[open] ( identifier[DEVELOPMENT_FILE] , literal[string] ) keyword[as] identifier[file_obj] :
identifier[contents] = identifier[file_obj] . identifier[read] ()
keyword[if] identifier[contents] != identifier[expected] :
identifier[err_msg] = literal[string] + identifier[get_diff] (
identifier[contents] ,
identifier[expected] ,
literal[string] ,
literal[string] ,
)
keyword[raise] identifier[ValueError] ( identifier[err_msg] )
keyword[else] :
identifier[print] ( literal[string] ) | def development_verify():
"""Populate template and compare to ``DEVELOPMENT.rst``
Raises:
ValueError: If the current ``DEVELOPMENT.rst`` doesn't
agree with the expected value computed from the template.
"""
with open(DEVELOPMENT_TEMPLATE, 'r') as file_obj:
template = file_obj.read() # depends on [control=['with'], data=['file_obj']]
expected = template.format(revision=REVISION, rtd_version=RTD_VERSION)
with open(DEVELOPMENT_FILE, 'r') as file_obj:
contents = file_obj.read() # depends on [control=['with'], data=['file_obj']]
if contents != expected:
err_msg = '\n' + get_diff(contents, expected, 'DEVELOPMENT.rst.actual', 'DEVELOPMENT.rst.expected')
raise ValueError(err_msg) # depends on [control=['if'], data=['contents', 'expected']]
else:
print('DEVELOPMENT.rst contents are as expected.') |
def url_to_image(url, flag=cv2.IMREAD_COLOR):
""" download the image, convert it to a NumPy array, and then read
it into OpenCV format """
resp = urlopen(url)
image = np.asarray(bytearray(resp.read()), dtype="uint8")
image = cv2.imdecode(image, flag)
return image | def function[url_to_image, parameter[url, flag]]:
constant[ download the image, convert it to a NumPy array, and then read
it into OpenCV format ]
variable[resp] assign[=] call[name[urlopen], parameter[name[url]]]
variable[image] assign[=] call[name[np].asarray, parameter[call[name[bytearray], parameter[call[name[resp].read, parameter[]]]]]]
variable[image] assign[=] call[name[cv2].imdecode, parameter[name[image], name[flag]]]
return[name[image]] | keyword[def] identifier[url_to_image] ( identifier[url] , identifier[flag] = identifier[cv2] . identifier[IMREAD_COLOR] ):
literal[string]
identifier[resp] = identifier[urlopen] ( identifier[url] )
identifier[image] = identifier[np] . identifier[asarray] ( identifier[bytearray] ( identifier[resp] . identifier[read] ()), identifier[dtype] = literal[string] )
identifier[image] = identifier[cv2] . identifier[imdecode] ( identifier[image] , identifier[flag] )
keyword[return] identifier[image] | def url_to_image(url, flag=cv2.IMREAD_COLOR):
""" download the image, convert it to a NumPy array, and then read
it into OpenCV format """
resp = urlopen(url)
image = np.asarray(bytearray(resp.read()), dtype='uint8')
image = cv2.imdecode(image, flag)
return image |
def isValidPublicAddress(address: str) -> bool:
"""Check if address is a valid NEO address"""
valid = False
if len(address) == 34 and address[0] == 'A':
try:
base58.b58decode_check(address.encode())
valid = True
except ValueError:
# checksum mismatch
valid = False
return valid | def function[isValidPublicAddress, parameter[address]]:
constant[Check if address is a valid NEO address]
variable[valid] assign[=] constant[False]
if <ast.BoolOp object at 0x7da2047e9060> begin[:]
<ast.Try object at 0x7da2047ea3e0>
return[name[valid]] | keyword[def] identifier[isValidPublicAddress] ( identifier[address] : identifier[str] )-> identifier[bool] :
literal[string]
identifier[valid] = keyword[False]
keyword[if] identifier[len] ( identifier[address] )== literal[int] keyword[and] identifier[address] [ literal[int] ]== literal[string] :
keyword[try] :
identifier[base58] . identifier[b58decode_check] ( identifier[address] . identifier[encode] ())
identifier[valid] = keyword[True]
keyword[except] identifier[ValueError] :
identifier[valid] = keyword[False]
keyword[return] identifier[valid] | def isValidPublicAddress(address: str) -> bool:
"""Check if address is a valid NEO address"""
valid = False
if len(address) == 34 and address[0] == 'A':
try:
base58.b58decode_check(address.encode())
valid = True # depends on [control=['try'], data=[]]
except ValueError:
# checksum mismatch
valid = False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return valid |
def active_version():
"""
Determine the current active version on the server
Just examine the which environment is symlinked
"""
link = '/'.join([deployment_root(),'env',env.project_name])
if not exists(link): return None
active = os.path.split(run('ls -al '+link).split(' -> ')[1])[1]
return active | def function[active_version, parameter[]]:
constant[
Determine the current active version on the server
Just examine the which environment is symlinked
]
variable[link] assign[=] call[constant[/].join, parameter[list[[<ast.Call object at 0x7da204565450>, <ast.Constant object at 0x7da204566290>, <ast.Attribute object at 0x7da204564970>]]]]
if <ast.UnaryOp object at 0x7da204564250> begin[:]
return[constant[None]]
variable[active] assign[=] call[call[name[os].path.split, parameter[call[call[call[name[run], parameter[binary_operation[constant[ls -al ] + name[link]]]].split, parameter[constant[ -> ]]]][constant[1]]]]][constant[1]]
return[name[active]] | keyword[def] identifier[active_version] ():
literal[string]
identifier[link] = literal[string] . identifier[join] ([ identifier[deployment_root] (), literal[string] , identifier[env] . identifier[project_name] ])
keyword[if] keyword[not] identifier[exists] ( identifier[link] ): keyword[return] keyword[None]
identifier[active] = identifier[os] . identifier[path] . identifier[split] ( identifier[run] ( literal[string] + identifier[link] ). identifier[split] ( literal[string] )[ literal[int] ])[ literal[int] ]
keyword[return] identifier[active] | def active_version():
"""
Determine the current active version on the server
Just examine the which environment is symlinked
"""
link = '/'.join([deployment_root(), 'env', env.project_name])
if not exists(link):
return None # depends on [control=['if'], data=[]]
active = os.path.split(run('ls -al ' + link).split(' -> ')[1])[1]
return active |
def get_class(self, module, class_name):
"""try and get the class_name from the module and make sure it is a valid
controller"""
# let's get the class
class_object = getattr(module, class_name, None)
if not class_object or not issubclass(class_object, Controller):
class_object = None
return class_object | def function[get_class, parameter[self, module, class_name]]:
constant[try and get the class_name from the module and make sure it is a valid
controller]
variable[class_object] assign[=] call[name[getattr], parameter[name[module], name[class_name], constant[None]]]
if <ast.BoolOp object at 0x7da18bcc81c0> begin[:]
variable[class_object] assign[=] constant[None]
return[name[class_object]] | keyword[def] identifier[get_class] ( identifier[self] , identifier[module] , identifier[class_name] ):
literal[string]
identifier[class_object] = identifier[getattr] ( identifier[module] , identifier[class_name] , keyword[None] )
keyword[if] keyword[not] identifier[class_object] keyword[or] keyword[not] identifier[issubclass] ( identifier[class_object] , identifier[Controller] ):
identifier[class_object] = keyword[None]
keyword[return] identifier[class_object] | def get_class(self, module, class_name):
"""try and get the class_name from the module and make sure it is a valid
controller"""
# let's get the class
class_object = getattr(module, class_name, None)
if not class_object or not issubclass(class_object, Controller):
class_object = None # depends on [control=['if'], data=[]]
return class_object |
def fetch(self):
"""
Fetch a ExecutionStepContextInstance
:returns: Fetched ExecutionStepContextInstance
:rtype: twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance
"""
params = values.of({})
payload = self._version.fetch(
'GET',
self._uri,
params=params,
)
return ExecutionStepContextInstance(
self._version,
payload,
flow_sid=self._solution['flow_sid'],
execution_sid=self._solution['execution_sid'],
step_sid=self._solution['step_sid'],
) | def function[fetch, parameter[self]]:
constant[
Fetch a ExecutionStepContextInstance
:returns: Fetched ExecutionStepContextInstance
:rtype: twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance
]
variable[params] assign[=] call[name[values].of, parameter[dictionary[[], []]]]
variable[payload] assign[=] call[name[self]._version.fetch, parameter[constant[GET], name[self]._uri]]
return[call[name[ExecutionStepContextInstance], parameter[name[self]._version, name[payload]]]] | keyword[def] identifier[fetch] ( identifier[self] ):
literal[string]
identifier[params] = identifier[values] . identifier[of] ({})
identifier[payload] = identifier[self] . identifier[_version] . identifier[fetch] (
literal[string] ,
identifier[self] . identifier[_uri] ,
identifier[params] = identifier[params] ,
)
keyword[return] identifier[ExecutionStepContextInstance] (
identifier[self] . identifier[_version] ,
identifier[payload] ,
identifier[flow_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[execution_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[step_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
) | def fetch(self):
"""
Fetch a ExecutionStepContextInstance
:returns: Fetched ExecutionStepContextInstance
:rtype: twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance
"""
params = values.of({})
payload = self._version.fetch('GET', self._uri, params=params)
return ExecutionStepContextInstance(self._version, payload, flow_sid=self._solution['flow_sid'], execution_sid=self._solution['execution_sid'], step_sid=self._solution['step_sid']) |
def getpart(self, ix):
"""
Returns a fileobject for the specified section.
This method optionally decompresses the data found in the .idb file,
and returns a file-like object, with seek, read, tell.
"""
if self.offsets[ix] == 0:
return
comp, ofs, size, checksum = self.getsectioninfo(ix)
fh = FileSection(self.fh, ofs, ofs + size)
if comp == 2:
import zlib
# very old databases used a different compression scheme:
wbits = -15 if self.magic == 'IDA0' else 15
fh = makeStringIO(zlib.decompress(fh.read(size), wbits))
elif comp == 0:
pass
else:
raise Exception("unsupported section encoding: %02x" % comp)
return fh | def function[getpart, parameter[self, ix]]:
constant[
Returns a fileobject for the specified section.
This method optionally decompresses the data found in the .idb file,
and returns a file-like object, with seek, read, tell.
]
if compare[call[name[self].offsets][name[ix]] equal[==] constant[0]] begin[:]
return[None]
<ast.Tuple object at 0x7da18bc73a30> assign[=] call[name[self].getsectioninfo, parameter[name[ix]]]
variable[fh] assign[=] call[name[FileSection], parameter[name[self].fh, name[ofs], binary_operation[name[ofs] + name[size]]]]
if compare[name[comp] equal[==] constant[2]] begin[:]
import module[zlib]
variable[wbits] assign[=] <ast.IfExp object at 0x7da18bc73b80>
variable[fh] assign[=] call[name[makeStringIO], parameter[call[name[zlib].decompress, parameter[call[name[fh].read, parameter[name[size]]], name[wbits]]]]]
return[name[fh]] | keyword[def] identifier[getpart] ( identifier[self] , identifier[ix] ):
literal[string]
keyword[if] identifier[self] . identifier[offsets] [ identifier[ix] ]== literal[int] :
keyword[return]
identifier[comp] , identifier[ofs] , identifier[size] , identifier[checksum] = identifier[self] . identifier[getsectioninfo] ( identifier[ix] )
identifier[fh] = identifier[FileSection] ( identifier[self] . identifier[fh] , identifier[ofs] , identifier[ofs] + identifier[size] )
keyword[if] identifier[comp] == literal[int] :
keyword[import] identifier[zlib]
identifier[wbits] =- literal[int] keyword[if] identifier[self] . identifier[magic] == literal[string] keyword[else] literal[int]
identifier[fh] = identifier[makeStringIO] ( identifier[zlib] . identifier[decompress] ( identifier[fh] . identifier[read] ( identifier[size] ), identifier[wbits] ))
keyword[elif] identifier[comp] == literal[int] :
keyword[pass]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[comp] )
keyword[return] identifier[fh] | def getpart(self, ix):
"""
Returns a fileobject for the specified section.
This method optionally decompresses the data found in the .idb file,
and returns a file-like object, with seek, read, tell.
"""
if self.offsets[ix] == 0:
return # depends on [control=['if'], data=[]]
(comp, ofs, size, checksum) = self.getsectioninfo(ix)
fh = FileSection(self.fh, ofs, ofs + size)
if comp == 2:
import zlib # very old databases used a different compression scheme:
wbits = -15 if self.magic == 'IDA0' else 15
fh = makeStringIO(zlib.decompress(fh.read(size), wbits)) # depends on [control=['if'], data=[]]
elif comp == 0:
pass # depends on [control=['if'], data=[]]
else:
raise Exception('unsupported section encoding: %02x' % comp)
return fh |
def search(self, **kwargs):
"""
Method to search interfaces based on extends search.
:return: Dict containing interfaces.
"""
return super(ApiInterfaceRequest, self).get(self.prepare_url('api/v3/interface/', kwargs)) | def function[search, parameter[self]]:
constant[
Method to search interfaces based on extends search.
:return: Dict containing interfaces.
]
return[call[call[name[super], parameter[name[ApiInterfaceRequest], name[self]]].get, parameter[call[name[self].prepare_url, parameter[constant[api/v3/interface/], name[kwargs]]]]]] | keyword[def] identifier[search] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[super] ( identifier[ApiInterfaceRequest] , identifier[self] ). identifier[get] ( identifier[self] . identifier[prepare_url] ( literal[string] , identifier[kwargs] )) | def search(self, **kwargs):
"""
Method to search interfaces based on extends search.
:return: Dict containing interfaces.
"""
return super(ApiInterfaceRequest, self).get(self.prepare_url('api/v3/interface/', kwargs)) |
def policy_net(rng_key,
batch_observations_shape,
num_actions,
bottom_layers=None):
"""A policy net function."""
# Use the bottom_layers as the bottom part of the network and just add the
# required layers on top of it.
if bottom_layers is None:
bottom_layers = []
# NOTE: The LogSoftmax instead of the Softmax.
bottom_layers.extend([layers.Dense(num_actions), layers.LogSoftmax()])
net = layers.Serial(*bottom_layers)
return net.initialize(batch_observations_shape, rng_key), net | def function[policy_net, parameter[rng_key, batch_observations_shape, num_actions, bottom_layers]]:
constant[A policy net function.]
if compare[name[bottom_layers] is constant[None]] begin[:]
variable[bottom_layers] assign[=] list[[]]
call[name[bottom_layers].extend, parameter[list[[<ast.Call object at 0x7da1b2062560>, <ast.Call object at 0x7da1b2063bb0>]]]]
variable[net] assign[=] call[name[layers].Serial, parameter[<ast.Starred object at 0x7da1b2061450>]]
return[tuple[[<ast.Call object at 0x7da1b20617b0>, <ast.Name object at 0x7da1b2061330>]]] | keyword[def] identifier[policy_net] ( identifier[rng_key] ,
identifier[batch_observations_shape] ,
identifier[num_actions] ,
identifier[bottom_layers] = keyword[None] ):
literal[string]
keyword[if] identifier[bottom_layers] keyword[is] keyword[None] :
identifier[bottom_layers] =[]
identifier[bottom_layers] . identifier[extend] ([ identifier[layers] . identifier[Dense] ( identifier[num_actions] ), identifier[layers] . identifier[LogSoftmax] ()])
identifier[net] = identifier[layers] . identifier[Serial] (* identifier[bottom_layers] )
keyword[return] identifier[net] . identifier[initialize] ( identifier[batch_observations_shape] , identifier[rng_key] ), identifier[net] | def policy_net(rng_key, batch_observations_shape, num_actions, bottom_layers=None):
"""A policy net function."""
# Use the bottom_layers as the bottom part of the network and just add the
# required layers on top of it.
if bottom_layers is None:
bottom_layers = [] # depends on [control=['if'], data=['bottom_layers']]
# NOTE: The LogSoftmax instead of the Softmax.
bottom_layers.extend([layers.Dense(num_actions), layers.LogSoftmax()])
net = layers.Serial(*bottom_layers)
return (net.initialize(batch_observations_shape, rng_key), net) |
def is_image_loaded(webdriver, webelement):
'''
Check if an image (in an image tag) is loaded.
Note: This call will not work against background images. Only Images in <img> tags.
Args:
webelement (WebElement) - WebDriver web element to validate.
'''
script = (u("return arguments[0].complete && type of arguments[0].naturalWidth != \"undefined\" ") +
u("&& arguments[0].naturalWidth > 0"))
try:
return webdriver.execute_script(script, webelement)
except:
return False | def function[is_image_loaded, parameter[webdriver, webelement]]:
constant[
Check if an image (in an image tag) is loaded.
Note: This call will not work against background images. Only Images in <img> tags.
Args:
webelement (WebElement) - WebDriver web element to validate.
]
variable[script] assign[=] binary_operation[call[name[u], parameter[constant[return arguments[0].complete && type of arguments[0].naturalWidth != "undefined" ]]] + call[name[u], parameter[constant[&& arguments[0].naturalWidth > 0]]]]
<ast.Try object at 0x7da18fe919f0> | keyword[def] identifier[is_image_loaded] ( identifier[webdriver] , identifier[webelement] ):
literal[string]
identifier[script] =( identifier[u] ( literal[string] )+
identifier[u] ( literal[string] ))
keyword[try] :
keyword[return] identifier[webdriver] . identifier[execute_script] ( identifier[script] , identifier[webelement] )
keyword[except] :
keyword[return] keyword[False] | def is_image_loaded(webdriver, webelement):
"""
Check if an image (in an image tag) is loaded.
Note: This call will not work against background images. Only Images in <img> tags.
Args:
webelement (WebElement) - WebDriver web element to validate.
"""
script = u('return arguments[0].complete && type of arguments[0].naturalWidth != "undefined" ') + u('&& arguments[0].naturalWidth > 0')
try:
return webdriver.execute_script(script, webelement) # depends on [control=['try'], data=[]]
except:
return False # depends on [control=['except'], data=[]] |
def select_visualization(n):
"""
get viz choice based on numerical index
"""
try:
n = int(n) - 1
test = VISUALIZATIONS_LIST[n] # throw exception if number wrong
return n
except:
printDebug("Invalid viz-type option. Valid options are:", "red")
show_types()
raise SystemExit(1) | def function[select_visualization, parameter[n]]:
constant[
get viz choice based on numerical index
]
<ast.Try object at 0x7da1b1004cd0> | keyword[def] identifier[select_visualization] ( identifier[n] ):
literal[string]
keyword[try] :
identifier[n] = identifier[int] ( identifier[n] )- literal[int]
identifier[test] = identifier[VISUALIZATIONS_LIST] [ identifier[n] ]
keyword[return] identifier[n]
keyword[except] :
identifier[printDebug] ( literal[string] , literal[string] )
identifier[show_types] ()
keyword[raise] identifier[SystemExit] ( literal[int] ) | def select_visualization(n):
"""
get viz choice based on numerical index
"""
try:
n = int(n) - 1
test = VISUALIZATIONS_LIST[n] # throw exception if number wrong
return n # depends on [control=['try'], data=[]]
except:
printDebug('Invalid viz-type option. Valid options are:', 'red')
show_types()
raise SystemExit(1) # depends on [control=['except'], data=[]] |
def _queue_into_buffer(transfersession):
"""
Takes a chunk of data from the store to be put into the buffer to be sent to another morango instance.
"""
last_saved_by_conditions = []
filter_prefixes = Filter(transfersession.filter)
server_fsic = json.loads(transfersession.server_fsic)
client_fsic = json.loads(transfersession.client_fsic)
if transfersession.push:
fsics = _fsic_queuing_calc(client_fsic, server_fsic)
else:
fsics = _fsic_queuing_calc(server_fsic, client_fsic)
# if fsics are identical or receiving end has newer data, then there is nothing to queue
if not fsics:
return
# create condition for all push FSICs where instance_ids are equal, but internal counters are higher than FSICs counters
for instance, counter in six.iteritems(fsics):
last_saved_by_conditions += ["(last_saved_instance = '{0}' AND last_saved_counter > {1})".format(instance, counter)]
if fsics:
last_saved_by_conditions = [_join_with_logical_operator(last_saved_by_conditions, 'OR')]
partition_conditions = []
# create condition for filtering by partitions
for prefix in filter_prefixes:
partition_conditions += ["partition LIKE '{}%'".format(prefix)]
if filter_prefixes:
partition_conditions = [_join_with_logical_operator(partition_conditions, 'OR')]
# combine conditions
fsic_and_partition_conditions = _join_with_logical_operator(last_saved_by_conditions + partition_conditions, 'AND')
# filter by profile
where_condition = _join_with_logical_operator([fsic_and_partition_conditions, "profile = '{}'".format(transfersession.sync_session.profile)], 'AND')
# execute raw sql to take all records that match condition, to be put into buffer for transfer
with connection.cursor() as cursor:
queue_buffer = """INSERT INTO {outgoing_buffer}
(model_uuid, serialized, deleted, last_saved_instance, last_saved_counter, hard_deleted,
model_name, profile, partition, source_id, conflicting_serialized_data, transfer_session_id, _self_ref_fk)
SELECT id, serialized, deleted, last_saved_instance, last_saved_counter, hard_deleted, model_name, profile, partition, source_id, conflicting_serialized_data, '{transfer_session_id}', _self_ref_fk
FROM {store} WHERE {condition}""".format(outgoing_buffer=Buffer._meta.db_table,
transfer_session_id=transfersession.id,
condition=where_condition,
store=Store._meta.db_table)
cursor.execute(queue_buffer)
# take all record max counters that are foreign keyed onto store models, which were queued into the buffer
queue_rmc_buffer = """INSERT INTO {outgoing_rmcb}
(instance_id, counter, transfer_session_id, model_uuid)
SELECT instance_id, counter, '{transfer_session_id}', store_model_id
FROM {record_max_counter} AS rmc
INNER JOIN {outgoing_buffer} AS buffer ON rmc.store_model_id = buffer.model_uuid
WHERE buffer.transfer_session_id = '{transfer_session_id}'
""".format(outgoing_rmcb=RecordMaxCounterBuffer._meta.db_table,
transfer_session_id=transfersession.id,
record_max_counter=RecordMaxCounter._meta.db_table,
outgoing_buffer=Buffer._meta.db_table)
cursor.execute(queue_rmc_buffer) | def function[_queue_into_buffer, parameter[transfersession]]:
constant[
Takes a chunk of data from the store to be put into the buffer to be sent to another morango instance.
]
variable[last_saved_by_conditions] assign[=] list[[]]
variable[filter_prefixes] assign[=] call[name[Filter], parameter[name[transfersession].filter]]
variable[server_fsic] assign[=] call[name[json].loads, parameter[name[transfersession].server_fsic]]
variable[client_fsic] assign[=] call[name[json].loads, parameter[name[transfersession].client_fsic]]
if name[transfersession].push begin[:]
variable[fsics] assign[=] call[name[_fsic_queuing_calc], parameter[name[client_fsic], name[server_fsic]]]
if <ast.UnaryOp object at 0x7da18f00d060> begin[:]
return[None]
for taget[tuple[[<ast.Name object at 0x7da18f00f670>, <ast.Name object at 0x7da18f00fc10>]]] in starred[call[name[six].iteritems, parameter[name[fsics]]]] begin[:]
<ast.AugAssign object at 0x7da18f00d2a0>
if name[fsics] begin[:]
variable[last_saved_by_conditions] assign[=] list[[<ast.Call object at 0x7da18f00cb80>]]
variable[partition_conditions] assign[=] list[[]]
for taget[name[prefix]] in starred[name[filter_prefixes]] begin[:]
<ast.AugAssign object at 0x7da18f00ca00>
if name[filter_prefixes] begin[:]
variable[partition_conditions] assign[=] list[[<ast.Call object at 0x7da20c794580>]]
variable[fsic_and_partition_conditions] assign[=] call[name[_join_with_logical_operator], parameter[binary_operation[name[last_saved_by_conditions] + name[partition_conditions]], constant[AND]]]
variable[where_condition] assign[=] call[name[_join_with_logical_operator], parameter[list[[<ast.Name object at 0x7da1b00f9450>, <ast.Call object at 0x7da1b00fad70>]], constant[AND]]]
with call[name[connection].cursor, parameter[]] begin[:]
variable[queue_buffer] assign[=] call[constant[INSERT INTO {outgoing_buffer}
(model_uuid, serialized, deleted, last_saved_instance, last_saved_counter, hard_deleted,
model_name, profile, partition, source_id, conflicting_serialized_data, transfer_session_id, _self_ref_fk)
SELECT id, serialized, deleted, last_saved_instance, last_saved_counter, hard_deleted, model_name, profile, partition, source_id, conflicting_serialized_data, '{transfer_session_id}', _self_ref_fk
FROM {store} WHERE {condition}].format, parameter[]]
call[name[cursor].execute, parameter[name[queue_buffer]]]
variable[queue_rmc_buffer] assign[=] call[constant[INSERT INTO {outgoing_rmcb}
(instance_id, counter, transfer_session_id, model_uuid)
SELECT instance_id, counter, '{transfer_session_id}', store_model_id
FROM {record_max_counter} AS rmc
INNER JOIN {outgoing_buffer} AS buffer ON rmc.store_model_id = buffer.model_uuid
WHERE buffer.transfer_session_id = '{transfer_session_id}'
].format, parameter[]]
call[name[cursor].execute, parameter[name[queue_rmc_buffer]]] | keyword[def] identifier[_queue_into_buffer] ( identifier[transfersession] ):
literal[string]
identifier[last_saved_by_conditions] =[]
identifier[filter_prefixes] = identifier[Filter] ( identifier[transfersession] . identifier[filter] )
identifier[server_fsic] = identifier[json] . identifier[loads] ( identifier[transfersession] . identifier[server_fsic] )
identifier[client_fsic] = identifier[json] . identifier[loads] ( identifier[transfersession] . identifier[client_fsic] )
keyword[if] identifier[transfersession] . identifier[push] :
identifier[fsics] = identifier[_fsic_queuing_calc] ( identifier[client_fsic] , identifier[server_fsic] )
keyword[else] :
identifier[fsics] = identifier[_fsic_queuing_calc] ( identifier[server_fsic] , identifier[client_fsic] )
keyword[if] keyword[not] identifier[fsics] :
keyword[return]
keyword[for] identifier[instance] , identifier[counter] keyword[in] identifier[six] . identifier[iteritems] ( identifier[fsics] ):
identifier[last_saved_by_conditions] +=[ literal[string] . identifier[format] ( identifier[instance] , identifier[counter] )]
keyword[if] identifier[fsics] :
identifier[last_saved_by_conditions] =[ identifier[_join_with_logical_operator] ( identifier[last_saved_by_conditions] , literal[string] )]
identifier[partition_conditions] =[]
keyword[for] identifier[prefix] keyword[in] identifier[filter_prefixes] :
identifier[partition_conditions] +=[ literal[string] . identifier[format] ( identifier[prefix] )]
keyword[if] identifier[filter_prefixes] :
identifier[partition_conditions] =[ identifier[_join_with_logical_operator] ( identifier[partition_conditions] , literal[string] )]
identifier[fsic_and_partition_conditions] = identifier[_join_with_logical_operator] ( identifier[last_saved_by_conditions] + identifier[partition_conditions] , literal[string] )
identifier[where_condition] = identifier[_join_with_logical_operator] ([ identifier[fsic_and_partition_conditions] , literal[string] . identifier[format] ( identifier[transfersession] . identifier[sync_session] . identifier[profile] )], literal[string] )
keyword[with] identifier[connection] . identifier[cursor] () keyword[as] identifier[cursor] :
identifier[queue_buffer] = literal[string] . identifier[format] ( identifier[outgoing_buffer] = identifier[Buffer] . identifier[_meta] . identifier[db_table] ,
identifier[transfer_session_id] = identifier[transfersession] . identifier[id] ,
identifier[condition] = identifier[where_condition] ,
identifier[store] = identifier[Store] . identifier[_meta] . identifier[db_table] )
identifier[cursor] . identifier[execute] ( identifier[queue_buffer] )
identifier[queue_rmc_buffer] = literal[string] . identifier[format] ( identifier[outgoing_rmcb] = identifier[RecordMaxCounterBuffer] . identifier[_meta] . identifier[db_table] ,
identifier[transfer_session_id] = identifier[transfersession] . identifier[id] ,
identifier[record_max_counter] = identifier[RecordMaxCounter] . identifier[_meta] . identifier[db_table] ,
identifier[outgoing_buffer] = identifier[Buffer] . identifier[_meta] . identifier[db_table] )
identifier[cursor] . identifier[execute] ( identifier[queue_rmc_buffer] ) | def _queue_into_buffer(transfersession):
"""
Takes a chunk of data from the store to be put into the buffer to be sent to another morango instance.
"""
last_saved_by_conditions = []
filter_prefixes = Filter(transfersession.filter)
server_fsic = json.loads(transfersession.server_fsic)
client_fsic = json.loads(transfersession.client_fsic)
if transfersession.push:
fsics = _fsic_queuing_calc(client_fsic, server_fsic) # depends on [control=['if'], data=[]]
else:
fsics = _fsic_queuing_calc(server_fsic, client_fsic)
# if fsics are identical or receiving end has newer data, then there is nothing to queue
if not fsics:
return # depends on [control=['if'], data=[]]
# create condition for all push FSICs where instance_ids are equal, but internal counters are higher than FSICs counters
for (instance, counter) in six.iteritems(fsics):
last_saved_by_conditions += ["(last_saved_instance = '{0}' AND last_saved_counter > {1})".format(instance, counter)] # depends on [control=['for'], data=[]]
if fsics:
last_saved_by_conditions = [_join_with_logical_operator(last_saved_by_conditions, 'OR')] # depends on [control=['if'], data=[]]
partition_conditions = []
# create condition for filtering by partitions
for prefix in filter_prefixes:
partition_conditions += ["partition LIKE '{}%'".format(prefix)] # depends on [control=['for'], data=['prefix']]
if filter_prefixes:
partition_conditions = [_join_with_logical_operator(partition_conditions, 'OR')] # depends on [control=['if'], data=[]]
# combine conditions
fsic_and_partition_conditions = _join_with_logical_operator(last_saved_by_conditions + partition_conditions, 'AND')
# filter by profile
where_condition = _join_with_logical_operator([fsic_and_partition_conditions, "profile = '{}'".format(transfersession.sync_session.profile)], 'AND')
# execute raw sql to take all records that match condition, to be put into buffer for transfer
with connection.cursor() as cursor:
queue_buffer = "INSERT INTO {outgoing_buffer}\n (model_uuid, serialized, deleted, last_saved_instance, last_saved_counter, hard_deleted,\n model_name, profile, partition, source_id, conflicting_serialized_data, transfer_session_id, _self_ref_fk)\n SELECT id, serialized, deleted, last_saved_instance, last_saved_counter, hard_deleted, model_name, profile, partition, source_id, conflicting_serialized_data, '{transfer_session_id}', _self_ref_fk\n FROM {store} WHERE {condition}".format(outgoing_buffer=Buffer._meta.db_table, transfer_session_id=transfersession.id, condition=where_condition, store=Store._meta.db_table)
cursor.execute(queue_buffer)
# take all record max counters that are foreign keyed onto store models, which were queued into the buffer
queue_rmc_buffer = "INSERT INTO {outgoing_rmcb}\n (instance_id, counter, transfer_session_id, model_uuid)\n SELECT instance_id, counter, '{transfer_session_id}', store_model_id\n FROM {record_max_counter} AS rmc\n INNER JOIN {outgoing_buffer} AS buffer ON rmc.store_model_id = buffer.model_uuid\n WHERE buffer.transfer_session_id = '{transfer_session_id}'\n ".format(outgoing_rmcb=RecordMaxCounterBuffer._meta.db_table, transfer_session_id=transfersession.id, record_max_counter=RecordMaxCounter._meta.db_table, outgoing_buffer=Buffer._meta.db_table)
cursor.execute(queue_rmc_buffer) # depends on [control=['with'], data=['cursor']] |
def _create_alpha(self, data, fill_value=None):
"""Create an alpha band DataArray object.
If `fill_value` is provided and input data is an integer type
then it is used to determine invalid "null" pixels instead of
xarray's `isnull` and `notnull` methods.
The returned array is 1 where data is valid, 0 where invalid.
"""
not_alpha = [b for b in data.coords['bands'].values if b != 'A']
null_mask = data.sel(bands=not_alpha)
if np.issubdtype(data.dtype, np.integer) and fill_value is not None:
null_mask = null_mask != fill_value
else:
null_mask = null_mask.notnull()
# if any of the bands are valid, we don't want transparency
null_mask = null_mask.any(dim='bands')
null_mask = null_mask.expand_dims('bands')
null_mask['bands'] = ['A']
# match data dtype
return null_mask | def function[_create_alpha, parameter[self, data, fill_value]]:
constant[Create an alpha band DataArray object.
If `fill_value` is provided and input data is an integer type
then it is used to determine invalid "null" pixels instead of
xarray's `isnull` and `notnull` methods.
The returned array is 1 where data is valid, 0 where invalid.
]
variable[not_alpha] assign[=] <ast.ListComp object at 0x7da20c6aa7d0>
variable[null_mask] assign[=] call[name[data].sel, parameter[]]
if <ast.BoolOp object at 0x7da20c6a8460> begin[:]
variable[null_mask] assign[=] compare[name[null_mask] not_equal[!=] name[fill_value]]
variable[null_mask] assign[=] call[name[null_mask].any, parameter[]]
variable[null_mask] assign[=] call[name[null_mask].expand_dims, parameter[constant[bands]]]
call[name[null_mask]][constant[bands]] assign[=] list[[<ast.Constant object at 0x7da20c6aac20>]]
return[name[null_mask]] | keyword[def] identifier[_create_alpha] ( identifier[self] , identifier[data] , identifier[fill_value] = keyword[None] ):
literal[string]
identifier[not_alpha] =[ identifier[b] keyword[for] identifier[b] keyword[in] identifier[data] . identifier[coords] [ literal[string] ]. identifier[values] keyword[if] identifier[b] != literal[string] ]
identifier[null_mask] = identifier[data] . identifier[sel] ( identifier[bands] = identifier[not_alpha] )
keyword[if] identifier[np] . identifier[issubdtype] ( identifier[data] . identifier[dtype] , identifier[np] . identifier[integer] ) keyword[and] identifier[fill_value] keyword[is] keyword[not] keyword[None] :
identifier[null_mask] = identifier[null_mask] != identifier[fill_value]
keyword[else] :
identifier[null_mask] = identifier[null_mask] . identifier[notnull] ()
identifier[null_mask] = identifier[null_mask] . identifier[any] ( identifier[dim] = literal[string] )
identifier[null_mask] = identifier[null_mask] . identifier[expand_dims] ( literal[string] )
identifier[null_mask] [ literal[string] ]=[ literal[string] ]
keyword[return] identifier[null_mask] | def _create_alpha(self, data, fill_value=None):
"""Create an alpha band DataArray object.
If `fill_value` is provided and input data is an integer type
then it is used to determine invalid "null" pixels instead of
xarray's `isnull` and `notnull` methods.
The returned array is 1 where data is valid, 0 where invalid.
"""
not_alpha = [b for b in data.coords['bands'].values if b != 'A']
null_mask = data.sel(bands=not_alpha)
if np.issubdtype(data.dtype, np.integer) and fill_value is not None:
null_mask = null_mask != fill_value # depends on [control=['if'], data=[]]
else:
null_mask = null_mask.notnull()
# if any of the bands are valid, we don't want transparency
null_mask = null_mask.any(dim='bands')
null_mask = null_mask.expand_dims('bands')
null_mask['bands'] = ['A']
# match data dtype
return null_mask |
def _normalize_string(self, text):
'''Prepares incoming text for parsing:
removes excessive spaces, tabs, newlines, etc.
'''
conversion = {
# newlines
'\r?\n': ' ',
# replace excessive empty spaces
'\s+': ' ',
# convert all types of hyphens/dashes to a
# simple old-school dash
# from http://utf8-chartable.de/unicode-utf8-table.pl?
# start=8192&number=128&utf8=string-literal
'‐': '-',
'‑': '-',
'‒': '-',
'–': '-',
'—': '-',
'―': '-',
}
for find, replace in six.iteritems(conversion):
text = re.sub(find, replace, text, flags=re.UNICODE)
return text | def function[_normalize_string, parameter[self, text]]:
constant[Prepares incoming text for parsing:
removes excessive spaces, tabs, newlines, etc.
]
variable[conversion] assign[=] dictionary[[<ast.Constant object at 0x7da1b28d5e10>, <ast.Constant object at 0x7da1b28d62c0>, <ast.Constant object at 0x7da1b28d5cc0>, <ast.Constant object at 0x7da1b28d6530>, <ast.Constant object at 0x7da1b28d6050>, <ast.Constant object at 0x7da1b27e3700>, <ast.Constant object at 0x7da1b27e27a0>, <ast.Constant object at 0x7da1b27e2e60>], [<ast.Constant object at 0x7da1b27e1b40>, <ast.Constant object at 0x7da1b2865870>, <ast.Constant object at 0x7da1b2867a30>, <ast.Constant object at 0x7da1b28645e0>, <ast.Constant object at 0x7da1b28739d0>, <ast.Constant object at 0x7da1b2872b90>, <ast.Constant object at 0x7da1b2872440>, <ast.Constant object at 0x7da1b2873cd0>]]
for taget[tuple[[<ast.Name object at 0x7da1b2871f00>, <ast.Name object at 0x7da1b28709d0>]]] in starred[call[name[six].iteritems, parameter[name[conversion]]]] begin[:]
variable[text] assign[=] call[name[re].sub, parameter[name[find], name[replace], name[text]]]
return[name[text]] | keyword[def] identifier[_normalize_string] ( identifier[self] , identifier[text] ):
literal[string]
identifier[conversion] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
keyword[for] identifier[find] , identifier[replace] keyword[in] identifier[six] . identifier[iteritems] ( identifier[conversion] ):
identifier[text] = identifier[re] . identifier[sub] ( identifier[find] , identifier[replace] , identifier[text] , identifier[flags] = identifier[re] . identifier[UNICODE] )
keyword[return] identifier[text] | def _normalize_string(self, text):
"""Prepares incoming text for parsing:
removes excessive spaces, tabs, newlines, etc.
"""
# newlines
# replace excessive empty spaces
# convert all types of hyphens/dashes to a
# simple old-school dash
# from http://utf8-chartable.de/unicode-utf8-table.pl?
# start=8192&number=128&utf8=string-literal
conversion = {'\r?\n': ' ', '\\s+': ' ', '‐': '-', '‑': '-', '‒': '-', '–': '-', '—': '-', '―': '-'}
for (find, replace) in six.iteritems(conversion):
text = re.sub(find, replace, text, flags=re.UNICODE) # depends on [control=['for'], data=[]]
return text |
def get_thing(self, idx):
"""
Get the thing at the given index.
idx -- the index
"""
try:
idx = int(idx)
except ValueError:
return None
if idx < 0 or idx >= len(self.things):
return None
return self.things[idx] | def function[get_thing, parameter[self, idx]]:
constant[
Get the thing at the given index.
idx -- the index
]
<ast.Try object at 0x7da1b02f1870>
if <ast.BoolOp object at 0x7da1b02f0b20> begin[:]
return[constant[None]]
return[call[name[self].things][name[idx]]] | keyword[def] identifier[get_thing] ( identifier[self] , identifier[idx] ):
literal[string]
keyword[try] :
identifier[idx] = identifier[int] ( identifier[idx] )
keyword[except] identifier[ValueError] :
keyword[return] keyword[None]
keyword[if] identifier[idx] < literal[int] keyword[or] identifier[idx] >= identifier[len] ( identifier[self] . identifier[things] ):
keyword[return] keyword[None]
keyword[return] identifier[self] . identifier[things] [ identifier[idx] ] | def get_thing(self, idx):
"""
Get the thing at the given index.
idx -- the index
"""
try:
idx = int(idx) # depends on [control=['try'], data=[]]
except ValueError:
return None # depends on [control=['except'], data=[]]
if idx < 0 or idx >= len(self.things):
return None # depends on [control=['if'], data=[]]
return self.things[idx] |
def eci2ecef (x, y, z, gmst=None):
"""Converts the given ECI coordinates to ECEF at the given Greenwich
Mean Sidereal Time (GMST) (defaults to now).
This code was adapted from
`shashwatak/satellite-js <https://github.com/shashwatak/satellite-js/blob/master/src/coordinate-transforms.js>`_
and http://ccar.colorado.edu/ASEN5070/handouts/coordsys.doc
"""
if gmst is None:
gmst = dmc.toGMST()
X = (x * math.cos(gmst)) + (y * math.sin(gmst))
Y = (x * (-math.sin(gmst))) + (y * math.cos(gmst))
Z = z
return X, Y, Z | def function[eci2ecef, parameter[x, y, z, gmst]]:
constant[Converts the given ECI coordinates to ECEF at the given Greenwich
Mean Sidereal Time (GMST) (defaults to now).
This code was adapted from
`shashwatak/satellite-js <https://github.com/shashwatak/satellite-js/blob/master/src/coordinate-transforms.js>`_
and http://ccar.colorado.edu/ASEN5070/handouts/coordsys.doc
]
if compare[name[gmst] is constant[None]] begin[:]
variable[gmst] assign[=] call[name[dmc].toGMST, parameter[]]
variable[X] assign[=] binary_operation[binary_operation[name[x] * call[name[math].cos, parameter[name[gmst]]]] + binary_operation[name[y] * call[name[math].sin, parameter[name[gmst]]]]]
variable[Y] assign[=] binary_operation[binary_operation[name[x] * <ast.UnaryOp object at 0x7da18dc99fc0>] + binary_operation[name[y] * call[name[math].cos, parameter[name[gmst]]]]]
variable[Z] assign[=] name[z]
return[tuple[[<ast.Name object at 0x7da18dc98970>, <ast.Name object at 0x7da18dc98520>, <ast.Name object at 0x7da18dc99d50>]]] | keyword[def] identifier[eci2ecef] ( identifier[x] , identifier[y] , identifier[z] , identifier[gmst] = keyword[None] ):
literal[string]
keyword[if] identifier[gmst] keyword[is] keyword[None] :
identifier[gmst] = identifier[dmc] . identifier[toGMST] ()
identifier[X] =( identifier[x] * identifier[math] . identifier[cos] ( identifier[gmst] ))+( identifier[y] * identifier[math] . identifier[sin] ( identifier[gmst] ))
identifier[Y] =( identifier[x] *(- identifier[math] . identifier[sin] ( identifier[gmst] )))+( identifier[y] * identifier[math] . identifier[cos] ( identifier[gmst] ))
identifier[Z] = identifier[z]
keyword[return] identifier[X] , identifier[Y] , identifier[Z] | def eci2ecef(x, y, z, gmst=None):
"""Converts the given ECI coordinates to ECEF at the given Greenwich
Mean Sidereal Time (GMST) (defaults to now).
This code was adapted from
`shashwatak/satellite-js <https://github.com/shashwatak/satellite-js/blob/master/src/coordinate-transforms.js>`_
and http://ccar.colorado.edu/ASEN5070/handouts/coordsys.doc
"""
if gmst is None:
gmst = dmc.toGMST() # depends on [control=['if'], data=['gmst']]
X = x * math.cos(gmst) + y * math.sin(gmst)
Y = x * -math.sin(gmst) + y * math.cos(gmst)
Z = z
return (X, Y, Z) |
def check_python_architecture(pythondir, target_arch_str):
"""
functions check architecture of target python
"""
pyth_str = subprocess.check_output(
[pythondir + 'python', '-c',
'import platform; print platform.architecture()[0]'])
if pyth_str[:2] != target_arch_str:
raise Exception(
"Wrong architecture of target python. Expected arch is"
+ target_arch_str) | def function[check_python_architecture, parameter[pythondir, target_arch_str]]:
constant[
functions check architecture of target python
]
variable[pyth_str] assign[=] call[name[subprocess].check_output, parameter[list[[<ast.BinOp object at 0x7da1b2218880>, <ast.Constant object at 0x7da1b2218190>, <ast.Constant object at 0x7da1b221aa70>]]]]
if compare[call[name[pyth_str]][<ast.Slice object at 0x7da1b221b130>] not_equal[!=] name[target_arch_str]] begin[:]
<ast.Raise object at 0x7da1b221a110> | keyword[def] identifier[check_python_architecture] ( identifier[pythondir] , identifier[target_arch_str] ):
literal[string]
identifier[pyth_str] = identifier[subprocess] . identifier[check_output] (
[ identifier[pythondir] + literal[string] , literal[string] ,
literal[string] ])
keyword[if] identifier[pyth_str] [: literal[int] ]!= identifier[target_arch_str] :
keyword[raise] identifier[Exception] (
literal[string]
+ identifier[target_arch_str] ) | def check_python_architecture(pythondir, target_arch_str):
"""
functions check architecture of target python
"""
pyth_str = subprocess.check_output([pythondir + 'python', '-c', 'import platform; print platform.architecture()[0]'])
if pyth_str[:2] != target_arch_str:
raise Exception('Wrong architecture of target python. Expected arch is' + target_arch_str) # depends on [control=['if'], data=['target_arch_str']] |
def random(cls, engine_or_session, limit=5):
"""
Return random ORM instance.
:type engine_or_session: Union[Engine, Session]
:type limit: int
:rtype: List[ExtendedBase]
"""
ses, auto_close = ensure_session(engine_or_session)
result = ses.query(cls).order_by(func.random()).limit(limit).all()
if auto_close: # pragma: no cover
ses.close()
return result | def function[random, parameter[cls, engine_or_session, limit]]:
constant[
Return random ORM instance.
:type engine_or_session: Union[Engine, Session]
:type limit: int
:rtype: List[ExtendedBase]
]
<ast.Tuple object at 0x7da2044c2d40> assign[=] call[name[ensure_session], parameter[name[engine_or_session]]]
variable[result] assign[=] call[call[call[call[name[ses].query, parameter[name[cls]]].order_by, parameter[call[name[func].random, parameter[]]]].limit, parameter[name[limit]]].all, parameter[]]
if name[auto_close] begin[:]
call[name[ses].close, parameter[]]
return[name[result]] | keyword[def] identifier[random] ( identifier[cls] , identifier[engine_or_session] , identifier[limit] = literal[int] ):
literal[string]
identifier[ses] , identifier[auto_close] = identifier[ensure_session] ( identifier[engine_or_session] )
identifier[result] = identifier[ses] . identifier[query] ( identifier[cls] ). identifier[order_by] ( identifier[func] . identifier[random] ()). identifier[limit] ( identifier[limit] ). identifier[all] ()
keyword[if] identifier[auto_close] :
identifier[ses] . identifier[close] ()
keyword[return] identifier[result] | def random(cls, engine_or_session, limit=5):
"""
Return random ORM instance.
:type engine_or_session: Union[Engine, Session]
:type limit: int
:rtype: List[ExtendedBase]
"""
(ses, auto_close) = ensure_session(engine_or_session)
result = ses.query(cls).order_by(func.random()).limit(limit).all()
if auto_close: # pragma: no cover
ses.close() # depends on [control=['if'], data=[]]
return result |
def typelogged_module(md):
"""Works like typelogged, but is only applicable to modules by explicit call).
md must be a module or a module name contained in sys.modules.
"""
if not pytypes.typelogging_enabled:
return md
if isinstance(md, str):
if md in sys.modules:
md = sys.modules[md]
if md is None:
return md
elif md in pytypes.typechecker._pending_modules:
# if import is pending, we just store this call for later
pytypes.typechecker._pending_modules[md].append(typelogged_module)
return md
assert(ismodule(md))
if md.__name__ in pytypes.typechecker._pending_modules:
# if import is pending, we just store this call for later
pytypes.typechecker._pending_modules[md.__name__].append(typelogged_module)
# we already process the module now as far as possible for its internal use
# todo: Issue warning here that not the whole module might be covered yet
assert(ismodule(md))
if md.__name__ in _fully_typelogged_modules and \
_fully_typelogged_modules[md.__name__] == len(md.__dict__):
return md
# To play it safe we avoid to modify the dict while iterating over it,
# so we previously cache keys.
# For this we don't use keys() because of Python 3.
# Todo: Better use inspect.getmembers here
keys = [key for key in md.__dict__]
for key in keys:
memb = md.__dict__[key]
if _check_as_func(memb) and memb.__module__ == md.__name__:
setattr(md, key, typelogged_func(memb))
elif isclass(memb) and memb.__module__ == md.__name__:
typelogged_class(memb)
if not md.__name__ in pytypes.typechecker._pending_modules:
_fully_typelogged_modules[md.__name__] = len(md.__dict__)
return md | def function[typelogged_module, parameter[md]]:
constant[Works like typelogged, but is only applicable to modules by explicit call).
md must be a module or a module name contained in sys.modules.
]
if <ast.UnaryOp object at 0x7da1b0dfb940> begin[:]
return[name[md]]
if call[name[isinstance], parameter[name[md], name[str]]] begin[:]
if compare[name[md] in name[sys].modules] begin[:]
variable[md] assign[=] call[name[sys].modules][name[md]]
if compare[name[md] is constant[None]] begin[:]
return[name[md]]
assert[call[name[ismodule], parameter[name[md]]]]
if compare[name[md].__name__ in name[pytypes].typechecker._pending_modules] begin[:]
call[call[name[pytypes].typechecker._pending_modules][name[md].__name__].append, parameter[name[typelogged_module]]]
assert[call[name[ismodule], parameter[name[md]]]]
if <ast.BoolOp object at 0x7da1b0df8250> begin[:]
return[name[md]]
variable[keys] assign[=] <ast.ListComp object at 0x7da1b0df85e0>
for taget[name[key]] in starred[name[keys]] begin[:]
variable[memb] assign[=] call[name[md].__dict__][name[key]]
if <ast.BoolOp object at 0x7da1b0df8910> begin[:]
call[name[setattr], parameter[name[md], name[key], call[name[typelogged_func], parameter[name[memb]]]]]
if <ast.UnaryOp object at 0x7da1b0df98d0> begin[:]
call[name[_fully_typelogged_modules]][name[md].__name__] assign[=] call[name[len], parameter[name[md].__dict__]]
return[name[md]] | keyword[def] identifier[typelogged_module] ( identifier[md] ):
literal[string]
keyword[if] keyword[not] identifier[pytypes] . identifier[typelogging_enabled] :
keyword[return] identifier[md]
keyword[if] identifier[isinstance] ( identifier[md] , identifier[str] ):
keyword[if] identifier[md] keyword[in] identifier[sys] . identifier[modules] :
identifier[md] = identifier[sys] . identifier[modules] [ identifier[md] ]
keyword[if] identifier[md] keyword[is] keyword[None] :
keyword[return] identifier[md]
keyword[elif] identifier[md] keyword[in] identifier[pytypes] . identifier[typechecker] . identifier[_pending_modules] :
identifier[pytypes] . identifier[typechecker] . identifier[_pending_modules] [ identifier[md] ]. identifier[append] ( identifier[typelogged_module] )
keyword[return] identifier[md]
keyword[assert] ( identifier[ismodule] ( identifier[md] ))
keyword[if] identifier[md] . identifier[__name__] keyword[in] identifier[pytypes] . identifier[typechecker] . identifier[_pending_modules] :
identifier[pytypes] . identifier[typechecker] . identifier[_pending_modules] [ identifier[md] . identifier[__name__] ]. identifier[append] ( identifier[typelogged_module] )
keyword[assert] ( identifier[ismodule] ( identifier[md] ))
keyword[if] identifier[md] . identifier[__name__] keyword[in] identifier[_fully_typelogged_modules] keyword[and] identifier[_fully_typelogged_modules] [ identifier[md] . identifier[__name__] ]== identifier[len] ( identifier[md] . identifier[__dict__] ):
keyword[return] identifier[md]
identifier[keys] =[ identifier[key] keyword[for] identifier[key] keyword[in] identifier[md] . identifier[__dict__] ]
keyword[for] identifier[key] keyword[in] identifier[keys] :
identifier[memb] = identifier[md] . identifier[__dict__] [ identifier[key] ]
keyword[if] identifier[_check_as_func] ( identifier[memb] ) keyword[and] identifier[memb] . identifier[__module__] == identifier[md] . identifier[__name__] :
identifier[setattr] ( identifier[md] , identifier[key] , identifier[typelogged_func] ( identifier[memb] ))
keyword[elif] identifier[isclass] ( identifier[memb] ) keyword[and] identifier[memb] . identifier[__module__] == identifier[md] . identifier[__name__] :
identifier[typelogged_class] ( identifier[memb] )
keyword[if] keyword[not] identifier[md] . identifier[__name__] keyword[in] identifier[pytypes] . identifier[typechecker] . identifier[_pending_modules] :
identifier[_fully_typelogged_modules] [ identifier[md] . identifier[__name__] ]= identifier[len] ( identifier[md] . identifier[__dict__] )
keyword[return] identifier[md] | def typelogged_module(md):
"""Works like typelogged, but is only applicable to modules by explicit call).
md must be a module or a module name contained in sys.modules.
"""
if not pytypes.typelogging_enabled:
return md # depends on [control=['if'], data=[]]
if isinstance(md, str):
if md in sys.modules:
md = sys.modules[md]
if md is None:
return md # depends on [control=['if'], data=['md']] # depends on [control=['if'], data=['md']]
elif md in pytypes.typechecker._pending_modules:
# if import is pending, we just store this call for later
pytypes.typechecker._pending_modules[md].append(typelogged_module)
return md # depends on [control=['if'], data=['md']] # depends on [control=['if'], data=[]]
assert ismodule(md)
if md.__name__ in pytypes.typechecker._pending_modules:
# if import is pending, we just store this call for later
pytypes.typechecker._pending_modules[md.__name__].append(typelogged_module) # depends on [control=['if'], data=[]]
# we already process the module now as far as possible for its internal use
# todo: Issue warning here that not the whole module might be covered yet
assert ismodule(md)
if md.__name__ in _fully_typelogged_modules and _fully_typelogged_modules[md.__name__] == len(md.__dict__):
return md # depends on [control=['if'], data=[]]
# To play it safe we avoid to modify the dict while iterating over it,
# so we previously cache keys.
# For this we don't use keys() because of Python 3.
# Todo: Better use inspect.getmembers here
keys = [key for key in md.__dict__]
for key in keys:
memb = md.__dict__[key]
if _check_as_func(memb) and memb.__module__ == md.__name__:
setattr(md, key, typelogged_func(memb)) # depends on [control=['if'], data=[]]
elif isclass(memb) and memb.__module__ == md.__name__:
typelogged_class(memb) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
if not md.__name__ in pytypes.typechecker._pending_modules:
_fully_typelogged_modules[md.__name__] = len(md.__dict__) # depends on [control=['if'], data=[]]
return md |
def get_md5(self):
'''Get or calculate MD5 value of the local file.'''
if self.md5 is None:
self.md5 = self.file_hash(self.filename)
return self.md5 | def function[get_md5, parameter[self]]:
constant[Get or calculate MD5 value of the local file.]
if compare[name[self].md5 is constant[None]] begin[:]
name[self].md5 assign[=] call[name[self].file_hash, parameter[name[self].filename]]
return[name[self].md5] | keyword[def] identifier[get_md5] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[md5] keyword[is] keyword[None] :
identifier[self] . identifier[md5] = identifier[self] . identifier[file_hash] ( identifier[self] . identifier[filename] )
keyword[return] identifier[self] . identifier[md5] | def get_md5(self):
"""Get or calculate MD5 value of the local file."""
if self.md5 is None:
self.md5 = self.file_hash(self.filename) # depends on [control=['if'], data=[]]
return self.md5 |
def plot_eval_results(eval_results, metric=None, xaxislabel=None, yaxislabel=None,
title=None, title_fontsize='x-large', axes_title_fontsize='large',
show_metric_direction=True, metric_direction_font_size='large',
subplots_opts=None, subplots_adjust_opts=None, figsize='auto',
**fig_kwargs):
"""
Plot the evaluation results from `eval_results`. `eval_results` must be a sequence containing `(param, values)`
tuples, where `param` is the parameter value to appear on the x axis and `values` can be a dict structure
containing the metric values. `eval_results` can be created using the `results_by_parameter` function from the
`topicmod.common` module.
Set `metric` to plot only a specific metric.
Set `xaxislabel` for a label on the x-axis.
Set `yaxislabel` for a label on the y-axis.
Set `title` for a plot title.
Options in a dict `subplots_opts` will be passed to `plt.subplots(...)`.
Options in a dict `subplots_adjust_opts` will be passed to `fig.subplots_adjust(...)`.
`figsize` can be set to a tuple `(width, height)` or to `"auto"` (default) which will set the size to
`(8, 2 * <num. of metrics>)`.
"""
if type(eval_results) not in (list, tuple) or not eval_results:
raise ValueError('`eval_results` must be a list or tuple with at least one element')
if type(eval_results[0]) not in (list, tuple) or len(eval_results[0]) != 2:
raise ValueError('`eval_results` must be a list or tuple containing a (param, values) tuple. '
'Maybe `eval_results` must be converted with `results_by_parameter`.')
if metric is not None and type(metric) not in (list, tuple):
metric = [metric]
elif metric is None:
# remove special evaluation result 'model': the calculated model itself
metric = list(set(next(iter(eval_results))[1].keys()) - {'model'})
metric = sorted(metric)
metric_direction = []
for m in metric:
if m == 'perplexity':
metric_direction.append('minimize')
else:
m_fn_name = 'metric_%s' % (m[:16] if m.startswith('coherence_gensim') else m)
m_fn = getattr(evaluate, m_fn_name, None)
if m_fn:
metric_direction.append(getattr(m_fn, 'direction', 'unknown'))
else:
metric_direction.append('unknown')
n_metrics = len(metric)
assert n_metrics == len(metric_direction)
metrics_ordered = []
for m_dir in sorted(set(metric_direction), reverse=True):
metrics_ordered.extend([(m, d) for m, d in zip(metric, metric_direction) if d == m_dir])
assert n_metrics == len(metrics_ordered)
# get figure and subplots (axes)
if figsize == 'auto':
figsize = (8, 2*n_metrics)
subplots_kwargs = dict(nrows=n_metrics, ncols=1, sharex=True, constrained_layout=True, figsize=figsize)
subplots_kwargs.update(subplots_opts or {})
subplots_kwargs.update(fig_kwargs)
fig, axes = plt.subplots(**subplots_kwargs)
# set title
if title:
fig.suptitle(title, fontsize=title_fontsize)
x = list(zip(*eval_results))[0]
# set adjustments
if title:
subplots_adjust_kwargs = dict(top=0.9, hspace=0.3)
else:
subplots_adjust_kwargs = {}
subplots_adjust_kwargs.update(subplots_adjust_opts or {})
if subplots_adjust_kwargs:
fig.subplots_adjust(**subplots_adjust_kwargs)
# draw subplot for each metric
axes_pos_per_dir = defaultdict(list)
for i, (ax, (m, m_dir)) in enumerate(zip(axes.flatten(), metrics_ordered)):
if show_metric_direction:
axes_pos_per_dir[m_dir].append(ax.get_position())
y = [metric_res[m] for _, metric_res in eval_results]
ax.plot(x, y, label=m)
ax.set_title(m, fontsize=axes_title_fontsize)
# set axis labels
if xaxislabel and i == len(metric)-1:
ax.set_xlabel(xaxislabel)
if yaxislabel:
ax.set_ylabel(yaxislabel)
# show grouped metric direction on the left
if axes_pos_per_dir: # = if show_metric_direction
left_xs = []
ys = []
for m_dir, bboxes in axes_pos_per_dir.items():
left_xs.append(min(bb.x0 for bb in bboxes))
min_y = min(bb.y0 for bb in bboxes)
max_y = max(bb.y1 for bb in bboxes)
ys.append((min_y, max_y))
left_x = min(left_xs) / 2.5
fig.lines = []
for (min_y, max_y), m_dir in zip(ys, axes_pos_per_dir.keys()):
center_y = min_y + (max_y - min_y) / 2
fig.lines.append(Line2D((left_x, left_x), (min_y, max_y), transform=fig.transFigure, linewidth=5,
color='lightgray'))
fig.text(left_x / 1.5, center_y, m_dir, fontsize=metric_direction_font_size, rotation='vertical',
horizontalalignment='right', verticalalignment='center')
return fig, axes | def function[plot_eval_results, parameter[eval_results, metric, xaxislabel, yaxislabel, title, title_fontsize, axes_title_fontsize, show_metric_direction, metric_direction_font_size, subplots_opts, subplots_adjust_opts, figsize]]:
constant[
Plot the evaluation results from `eval_results`. `eval_results` must be a sequence containing `(param, values)`
tuples, where `param` is the parameter value to appear on the x axis and `values` can be a dict structure
containing the metric values. `eval_results` can be created using the `results_by_parameter` function from the
`topicmod.common` module.
Set `metric` to plot only a specific metric.
Set `xaxislabel` for a label on the x-axis.
Set `yaxislabel` for a label on the y-axis.
Set `title` for a plot title.
Options in a dict `subplots_opts` will be passed to `plt.subplots(...)`.
Options in a dict `subplots_adjust_opts` will be passed to `fig.subplots_adjust(...)`.
`figsize` can be set to a tuple `(width, height)` or to `"auto"` (default) which will set the size to
`(8, 2 * <num. of metrics>)`.
]
if <ast.BoolOp object at 0x7da18f811ea0> begin[:]
<ast.Raise object at 0x7da18f813100>
if <ast.BoolOp object at 0x7da18f813bb0> begin[:]
<ast.Raise object at 0x7da18f812260>
if <ast.BoolOp object at 0x7da18f8102b0> begin[:]
variable[metric] assign[=] list[[<ast.Name object at 0x7da18f813fd0>]]
variable[metric] assign[=] call[name[sorted], parameter[name[metric]]]
variable[metric_direction] assign[=] list[[]]
for taget[name[m]] in starred[name[metric]] begin[:]
if compare[name[m] equal[==] constant[perplexity]] begin[:]
call[name[metric_direction].append, parameter[constant[minimize]]]
variable[n_metrics] assign[=] call[name[len], parameter[name[metric]]]
assert[compare[name[n_metrics] equal[==] call[name[len], parameter[name[metric_direction]]]]]
variable[metrics_ordered] assign[=] list[[]]
for taget[name[m_dir]] in starred[call[name[sorted], parameter[call[name[set], parameter[name[metric_direction]]]]]] begin[:]
call[name[metrics_ordered].extend, parameter[<ast.ListComp object at 0x7da18f8116c0>]]
assert[compare[name[n_metrics] equal[==] call[name[len], parameter[name[metrics_ordered]]]]]
if compare[name[figsize] equal[==] constant[auto]] begin[:]
variable[figsize] assign[=] tuple[[<ast.Constant object at 0x7da18f8129b0>, <ast.BinOp object at 0x7da18f8124d0>]]
variable[subplots_kwargs] assign[=] call[name[dict], parameter[]]
call[name[subplots_kwargs].update, parameter[<ast.BoolOp object at 0x7da18f811e70>]]
call[name[subplots_kwargs].update, parameter[name[fig_kwargs]]]
<ast.Tuple object at 0x7da18f810220> assign[=] call[name[plt].subplots, parameter[]]
if name[title] begin[:]
call[name[fig].suptitle, parameter[name[title]]]
variable[x] assign[=] call[call[name[list], parameter[call[name[zip], parameter[<ast.Starred object at 0x7da18f811b10>]]]]][constant[0]]
if name[title] begin[:]
variable[subplots_adjust_kwargs] assign[=] call[name[dict], parameter[]]
call[name[subplots_adjust_kwargs].update, parameter[<ast.BoolOp object at 0x7da18f810f70>]]
if name[subplots_adjust_kwargs] begin[:]
call[name[fig].subplots_adjust, parameter[]]
variable[axes_pos_per_dir] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[tuple[[<ast.Name object at 0x7da18f811510>, <ast.Tuple object at 0x7da18f811210>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[call[name[axes].flatten, parameter[]], name[metrics_ordered]]]]]] begin[:]
if name[show_metric_direction] begin[:]
call[call[name[axes_pos_per_dir]][name[m_dir]].append, parameter[call[name[ax].get_position, parameter[]]]]
variable[y] assign[=] <ast.ListComp object at 0x7da18ede5090>
call[name[ax].plot, parameter[name[x], name[y]]]
call[name[ax].set_title, parameter[name[m]]]
if <ast.BoolOp object at 0x7da18ede75e0> begin[:]
call[name[ax].set_xlabel, parameter[name[xaxislabel]]]
if name[yaxislabel] begin[:]
call[name[ax].set_ylabel, parameter[name[yaxislabel]]]
if name[axes_pos_per_dir] begin[:]
variable[left_xs] assign[=] list[[]]
variable[ys] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18ede7a90>, <ast.Name object at 0x7da18ede66b0>]]] in starred[call[name[axes_pos_per_dir].items, parameter[]]] begin[:]
call[name[left_xs].append, parameter[call[name[min], parameter[<ast.GeneratorExp object at 0x7da18ede6350>]]]]
variable[min_y] assign[=] call[name[min], parameter[<ast.GeneratorExp object at 0x7da18ede6dd0>]]
variable[max_y] assign[=] call[name[max], parameter[<ast.GeneratorExp object at 0x7da18ede5060>]]
call[name[ys].append, parameter[tuple[[<ast.Name object at 0x7da18ede4e80>, <ast.Name object at 0x7da18ede78e0>]]]]
variable[left_x] assign[=] binary_operation[call[name[min], parameter[name[left_xs]]] / constant[2.5]]
name[fig].lines assign[=] list[[]]
for taget[tuple[[<ast.Tuple object at 0x7da18ede5840>, <ast.Name object at 0x7da18ede5390>]]] in starred[call[name[zip], parameter[name[ys], call[name[axes_pos_per_dir].keys, parameter[]]]]] begin[:]
variable[center_y] assign[=] binary_operation[name[min_y] + binary_operation[binary_operation[name[max_y] - name[min_y]] / constant[2]]]
call[name[fig].lines.append, parameter[call[name[Line2D], parameter[tuple[[<ast.Name object at 0x7da2041da440>, <ast.Name object at 0x7da2041d91e0>]], tuple[[<ast.Name object at 0x7da2041da200>, <ast.Name object at 0x7da2041dbb20>]]]]]]
call[name[fig].text, parameter[binary_operation[name[left_x] / constant[1.5]], name[center_y], name[m_dir]]]
return[tuple[[<ast.Name object at 0x7da2041dab00>, <ast.Name object at 0x7da2041d95a0>]]] | keyword[def] identifier[plot_eval_results] ( identifier[eval_results] , identifier[metric] = keyword[None] , identifier[xaxislabel] = keyword[None] , identifier[yaxislabel] = keyword[None] ,
identifier[title] = keyword[None] , identifier[title_fontsize] = literal[string] , identifier[axes_title_fontsize] = literal[string] ,
identifier[show_metric_direction] = keyword[True] , identifier[metric_direction_font_size] = literal[string] ,
identifier[subplots_opts] = keyword[None] , identifier[subplots_adjust_opts] = keyword[None] , identifier[figsize] = literal[string] ,
** identifier[fig_kwargs] ):
literal[string]
keyword[if] identifier[type] ( identifier[eval_results] ) keyword[not] keyword[in] ( identifier[list] , identifier[tuple] ) keyword[or] keyword[not] identifier[eval_results] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[type] ( identifier[eval_results] [ literal[int] ]) keyword[not] keyword[in] ( identifier[list] , identifier[tuple] ) keyword[or] identifier[len] ( identifier[eval_results] [ literal[int] ])!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[if] identifier[metric] keyword[is] keyword[not] keyword[None] keyword[and] identifier[type] ( identifier[metric] ) keyword[not] keyword[in] ( identifier[list] , identifier[tuple] ):
identifier[metric] =[ identifier[metric] ]
keyword[elif] identifier[metric] keyword[is] keyword[None] :
identifier[metric] = identifier[list] ( identifier[set] ( identifier[next] ( identifier[iter] ( identifier[eval_results] ))[ literal[int] ]. identifier[keys] ())-{ literal[string] })
identifier[metric] = identifier[sorted] ( identifier[metric] )
identifier[metric_direction] =[]
keyword[for] identifier[m] keyword[in] identifier[metric] :
keyword[if] identifier[m] == literal[string] :
identifier[metric_direction] . identifier[append] ( literal[string] )
keyword[else] :
identifier[m_fn_name] = literal[string] %( identifier[m] [: literal[int] ] keyword[if] identifier[m] . identifier[startswith] ( literal[string] ) keyword[else] identifier[m] )
identifier[m_fn] = identifier[getattr] ( identifier[evaluate] , identifier[m_fn_name] , keyword[None] )
keyword[if] identifier[m_fn] :
identifier[metric_direction] . identifier[append] ( identifier[getattr] ( identifier[m_fn] , literal[string] , literal[string] ))
keyword[else] :
identifier[metric_direction] . identifier[append] ( literal[string] )
identifier[n_metrics] = identifier[len] ( identifier[metric] )
keyword[assert] identifier[n_metrics] == identifier[len] ( identifier[metric_direction] )
identifier[metrics_ordered] =[]
keyword[for] identifier[m_dir] keyword[in] identifier[sorted] ( identifier[set] ( identifier[metric_direction] ), identifier[reverse] = keyword[True] ):
identifier[metrics_ordered] . identifier[extend] ([( identifier[m] , identifier[d] ) keyword[for] identifier[m] , identifier[d] keyword[in] identifier[zip] ( identifier[metric] , identifier[metric_direction] ) keyword[if] identifier[d] == identifier[m_dir] ])
keyword[assert] identifier[n_metrics] == identifier[len] ( identifier[metrics_ordered] )
keyword[if] identifier[figsize] == literal[string] :
identifier[figsize] =( literal[int] , literal[int] * identifier[n_metrics] )
identifier[subplots_kwargs] = identifier[dict] ( identifier[nrows] = identifier[n_metrics] , identifier[ncols] = literal[int] , identifier[sharex] = keyword[True] , identifier[constrained_layout] = keyword[True] , identifier[figsize] = identifier[figsize] )
identifier[subplots_kwargs] . identifier[update] ( identifier[subplots_opts] keyword[or] {})
identifier[subplots_kwargs] . identifier[update] ( identifier[fig_kwargs] )
identifier[fig] , identifier[axes] = identifier[plt] . identifier[subplots] (** identifier[subplots_kwargs] )
keyword[if] identifier[title] :
identifier[fig] . identifier[suptitle] ( identifier[title] , identifier[fontsize] = identifier[title_fontsize] )
identifier[x] = identifier[list] ( identifier[zip] (* identifier[eval_results] ))[ literal[int] ]
keyword[if] identifier[title] :
identifier[subplots_adjust_kwargs] = identifier[dict] ( identifier[top] = literal[int] , identifier[hspace] = literal[int] )
keyword[else] :
identifier[subplots_adjust_kwargs] ={}
identifier[subplots_adjust_kwargs] . identifier[update] ( identifier[subplots_adjust_opts] keyword[or] {})
keyword[if] identifier[subplots_adjust_kwargs] :
identifier[fig] . identifier[subplots_adjust] (** identifier[subplots_adjust_kwargs] )
identifier[axes_pos_per_dir] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[i] ,( identifier[ax] ,( identifier[m] , identifier[m_dir] )) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[axes] . identifier[flatten] (), identifier[metrics_ordered] )):
keyword[if] identifier[show_metric_direction] :
identifier[axes_pos_per_dir] [ identifier[m_dir] ]. identifier[append] ( identifier[ax] . identifier[get_position] ())
identifier[y] =[ identifier[metric_res] [ identifier[m] ] keyword[for] identifier[_] , identifier[metric_res] keyword[in] identifier[eval_results] ]
identifier[ax] . identifier[plot] ( identifier[x] , identifier[y] , identifier[label] = identifier[m] )
identifier[ax] . identifier[set_title] ( identifier[m] , identifier[fontsize] = identifier[axes_title_fontsize] )
keyword[if] identifier[xaxislabel] keyword[and] identifier[i] == identifier[len] ( identifier[metric] )- literal[int] :
identifier[ax] . identifier[set_xlabel] ( identifier[xaxislabel] )
keyword[if] identifier[yaxislabel] :
identifier[ax] . identifier[set_ylabel] ( identifier[yaxislabel] )
keyword[if] identifier[axes_pos_per_dir] :
identifier[left_xs] =[]
identifier[ys] =[]
keyword[for] identifier[m_dir] , identifier[bboxes] keyword[in] identifier[axes_pos_per_dir] . identifier[items] ():
identifier[left_xs] . identifier[append] ( identifier[min] ( identifier[bb] . identifier[x0] keyword[for] identifier[bb] keyword[in] identifier[bboxes] ))
identifier[min_y] = identifier[min] ( identifier[bb] . identifier[y0] keyword[for] identifier[bb] keyword[in] identifier[bboxes] )
identifier[max_y] = identifier[max] ( identifier[bb] . identifier[y1] keyword[for] identifier[bb] keyword[in] identifier[bboxes] )
identifier[ys] . identifier[append] (( identifier[min_y] , identifier[max_y] ))
identifier[left_x] = identifier[min] ( identifier[left_xs] )/ literal[int]
identifier[fig] . identifier[lines] =[]
keyword[for] ( identifier[min_y] , identifier[max_y] ), identifier[m_dir] keyword[in] identifier[zip] ( identifier[ys] , identifier[axes_pos_per_dir] . identifier[keys] ()):
identifier[center_y] = identifier[min_y] +( identifier[max_y] - identifier[min_y] )/ literal[int]
identifier[fig] . identifier[lines] . identifier[append] ( identifier[Line2D] (( identifier[left_x] , identifier[left_x] ),( identifier[min_y] , identifier[max_y] ), identifier[transform] = identifier[fig] . identifier[transFigure] , identifier[linewidth] = literal[int] ,
identifier[color] = literal[string] ))
identifier[fig] . identifier[text] ( identifier[left_x] / literal[int] , identifier[center_y] , identifier[m_dir] , identifier[fontsize] = identifier[metric_direction_font_size] , identifier[rotation] = literal[string] ,
identifier[horizontalalignment] = literal[string] , identifier[verticalalignment] = literal[string] )
keyword[return] identifier[fig] , identifier[axes] | def plot_eval_results(eval_results, metric=None, xaxislabel=None, yaxislabel=None, title=None, title_fontsize='x-large', axes_title_fontsize='large', show_metric_direction=True, metric_direction_font_size='large', subplots_opts=None, subplots_adjust_opts=None, figsize='auto', **fig_kwargs):
"""
Plot the evaluation results from `eval_results`. `eval_results` must be a sequence containing `(param, values)`
tuples, where `param` is the parameter value to appear on the x axis and `values` can be a dict structure
containing the metric values. `eval_results` can be created using the `results_by_parameter` function from the
`topicmod.common` module.
Set `metric` to plot only a specific metric.
Set `xaxislabel` for a label on the x-axis.
Set `yaxislabel` for a label on the y-axis.
Set `title` for a plot title.
Options in a dict `subplots_opts` will be passed to `plt.subplots(...)`.
Options in a dict `subplots_adjust_opts` will be passed to `fig.subplots_adjust(...)`.
`figsize` can be set to a tuple `(width, height)` or to `"auto"` (default) which will set the size to
`(8, 2 * <num. of metrics>)`.
"""
if type(eval_results) not in (list, tuple) or not eval_results:
raise ValueError('`eval_results` must be a list or tuple with at least one element') # depends on [control=['if'], data=[]]
if type(eval_results[0]) not in (list, tuple) or len(eval_results[0]) != 2:
raise ValueError('`eval_results` must be a list or tuple containing a (param, values) tuple. Maybe `eval_results` must be converted with `results_by_parameter`.') # depends on [control=['if'], data=[]]
if metric is not None and type(metric) not in (list, tuple):
metric = [metric] # depends on [control=['if'], data=[]]
elif metric is None:
# remove special evaluation result 'model': the calculated model itself
metric = list(set(next(iter(eval_results))[1].keys()) - {'model'}) # depends on [control=['if'], data=['metric']]
metric = sorted(metric)
metric_direction = []
for m in metric:
if m == 'perplexity':
metric_direction.append('minimize') # depends on [control=['if'], data=[]]
else:
m_fn_name = 'metric_%s' % (m[:16] if m.startswith('coherence_gensim') else m)
m_fn = getattr(evaluate, m_fn_name, None)
if m_fn:
metric_direction.append(getattr(m_fn, 'direction', 'unknown')) # depends on [control=['if'], data=[]]
else:
metric_direction.append('unknown') # depends on [control=['for'], data=['m']]
n_metrics = len(metric)
assert n_metrics == len(metric_direction)
metrics_ordered = []
for m_dir in sorted(set(metric_direction), reverse=True):
metrics_ordered.extend([(m, d) for (m, d) in zip(metric, metric_direction) if d == m_dir]) # depends on [control=['for'], data=['m_dir']]
assert n_metrics == len(metrics_ordered)
# get figure and subplots (axes)
if figsize == 'auto':
figsize = (8, 2 * n_metrics) # depends on [control=['if'], data=['figsize']]
subplots_kwargs = dict(nrows=n_metrics, ncols=1, sharex=True, constrained_layout=True, figsize=figsize)
subplots_kwargs.update(subplots_opts or {})
subplots_kwargs.update(fig_kwargs)
(fig, axes) = plt.subplots(**subplots_kwargs)
# set title
if title:
fig.suptitle(title, fontsize=title_fontsize) # depends on [control=['if'], data=[]]
x = list(zip(*eval_results))[0]
# set adjustments
if title:
subplots_adjust_kwargs = dict(top=0.9, hspace=0.3) # depends on [control=['if'], data=[]]
else:
subplots_adjust_kwargs = {}
subplots_adjust_kwargs.update(subplots_adjust_opts or {})
if subplots_adjust_kwargs:
fig.subplots_adjust(**subplots_adjust_kwargs) # depends on [control=['if'], data=[]]
# draw subplot for each metric
axes_pos_per_dir = defaultdict(list)
for (i, (ax, (m, m_dir))) in enumerate(zip(axes.flatten(), metrics_ordered)):
if show_metric_direction:
axes_pos_per_dir[m_dir].append(ax.get_position()) # depends on [control=['if'], data=[]]
y = [metric_res[m] for (_, metric_res) in eval_results]
ax.plot(x, y, label=m)
ax.set_title(m, fontsize=axes_title_fontsize)
# set axis labels
if xaxislabel and i == len(metric) - 1:
ax.set_xlabel(xaxislabel) # depends on [control=['if'], data=[]]
if yaxislabel:
ax.set_ylabel(yaxislabel) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# show grouped metric direction on the left
if axes_pos_per_dir: # = if show_metric_direction
left_xs = []
ys = []
for (m_dir, bboxes) in axes_pos_per_dir.items():
left_xs.append(min((bb.x0 for bb in bboxes)))
min_y = min((bb.y0 for bb in bboxes))
max_y = max((bb.y1 for bb in bboxes))
ys.append((min_y, max_y)) # depends on [control=['for'], data=[]]
left_x = min(left_xs) / 2.5
fig.lines = []
for ((min_y, max_y), m_dir) in zip(ys, axes_pos_per_dir.keys()):
center_y = min_y + (max_y - min_y) / 2
fig.lines.append(Line2D((left_x, left_x), (min_y, max_y), transform=fig.transFigure, linewidth=5, color='lightgray'))
fig.text(left_x / 1.5, center_y, m_dir, fontsize=metric_direction_font_size, rotation='vertical', horizontalalignment='right', verticalalignment='center') # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
return (fig, axes) |
def add_custom_aggregation(self, agg, name=None):
"""
Takes in an es_dsl Aggregation object and adds it to the aggregation dict.
Can be used to add custom aggregations such as moving averages
:param agg: aggregation to be added to the es_dsl search object
:param name: name of the aggregation object (optional)
:returns: self, which allows the method to be chainable with the other methods
"""
agg_name = name if name else 'custom_agg'
self.aggregations[agg_name] = agg
return self | def function[add_custom_aggregation, parameter[self, agg, name]]:
constant[
Takes in an es_dsl Aggregation object and adds it to the aggregation dict.
Can be used to add custom aggregations such as moving averages
:param agg: aggregation to be added to the es_dsl search object
:param name: name of the aggregation object (optional)
:returns: self, which allows the method to be chainable with the other methods
]
variable[agg_name] assign[=] <ast.IfExp object at 0x7da1b2604910>
call[name[self].aggregations][name[agg_name]] assign[=] name[agg]
return[name[self]] | keyword[def] identifier[add_custom_aggregation] ( identifier[self] , identifier[agg] , identifier[name] = keyword[None] ):
literal[string]
identifier[agg_name] = identifier[name] keyword[if] identifier[name] keyword[else] literal[string]
identifier[self] . identifier[aggregations] [ identifier[agg_name] ]= identifier[agg]
keyword[return] identifier[self] | def add_custom_aggregation(self, agg, name=None):
"""
Takes in an es_dsl Aggregation object and adds it to the aggregation dict.
Can be used to add custom aggregations such as moving averages
:param agg: aggregation to be added to the es_dsl search object
:param name: name of the aggregation object (optional)
:returns: self, which allows the method to be chainable with the other methods
"""
agg_name = name if name else 'custom_agg'
self.aggregations[agg_name] = agg
return self |
def get_available_detectors():
"""Return list of detectors known in the currently sourced lalsuite.
This function will query lalsuite about which detectors are known to
lalsuite. Detectors are identified by a two character string e.g. 'K1',
but also by a longer, and clearer name, e.g. KAGRA. This function returns
both. As LAL doesn't really expose this functionality we have to make some
assumptions about how this information is stored in LAL. Therefore while
we hope this function will work correctly, it's possible it will need
updating in the future. Better if lal would expose this information
properly.
"""
ld = lal.__dict__
known_lal_names = [j for j in ld.keys() if "DETECTOR_PREFIX" in j]
known_prefixes = [ld[k] for k in known_lal_names]
known_names = [ld[k.replace('PREFIX', 'NAME')] for k in known_lal_names]
return zip(known_prefixes, known_names) | def function[get_available_detectors, parameter[]]:
constant[Return list of detectors known in the currently sourced lalsuite.
This function will query lalsuite about which detectors are known to
lalsuite. Detectors are identified by a two character string e.g. 'K1',
but also by a longer, and clearer name, e.g. KAGRA. This function returns
both. As LAL doesn't really expose this functionality we have to make some
assumptions about how this information is stored in LAL. Therefore while
we hope this function will work correctly, it's possible it will need
updating in the future. Better if lal would expose this information
properly.
]
variable[ld] assign[=] name[lal].__dict__
variable[known_lal_names] assign[=] <ast.ListComp object at 0x7da20c991120>
variable[known_prefixes] assign[=] <ast.ListComp object at 0x7da20c9938b0>
variable[known_names] assign[=] <ast.ListComp object at 0x7da20c9921d0>
return[call[name[zip], parameter[name[known_prefixes], name[known_names]]]] | keyword[def] identifier[get_available_detectors] ():
literal[string]
identifier[ld] = identifier[lal] . identifier[__dict__]
identifier[known_lal_names] =[ identifier[j] keyword[for] identifier[j] keyword[in] identifier[ld] . identifier[keys] () keyword[if] literal[string] keyword[in] identifier[j] ]
identifier[known_prefixes] =[ identifier[ld] [ identifier[k] ] keyword[for] identifier[k] keyword[in] identifier[known_lal_names] ]
identifier[known_names] =[ identifier[ld] [ identifier[k] . identifier[replace] ( literal[string] , literal[string] )] keyword[for] identifier[k] keyword[in] identifier[known_lal_names] ]
keyword[return] identifier[zip] ( identifier[known_prefixes] , identifier[known_names] ) | def get_available_detectors():
"""Return list of detectors known in the currently sourced lalsuite.
This function will query lalsuite about which detectors are known to
lalsuite. Detectors are identified by a two character string e.g. 'K1',
but also by a longer, and clearer name, e.g. KAGRA. This function returns
both. As LAL doesn't really expose this functionality we have to make some
assumptions about how this information is stored in LAL. Therefore while
we hope this function will work correctly, it's possible it will need
updating in the future. Better if lal would expose this information
properly.
"""
ld = lal.__dict__
known_lal_names = [j for j in ld.keys() if 'DETECTOR_PREFIX' in j]
known_prefixes = [ld[k] for k in known_lal_names]
known_names = [ld[k.replace('PREFIX', 'NAME')] for k in known_lal_names]
return zip(known_prefixes, known_names) |
def clear(self):
"""Clear pool connections."""
while not self._pool.empty():
conn = yield from self._pool.get()
self._do_close(conn) | def function[clear, parameter[self]]:
constant[Clear pool connections.]
while <ast.UnaryOp object at 0x7da1b0d8cb80> begin[:]
variable[conn] assign[=] <ast.YieldFrom object at 0x7da1b0d8c340>
call[name[self]._do_close, parameter[name[conn]]] | keyword[def] identifier[clear] ( identifier[self] ):
literal[string]
keyword[while] keyword[not] identifier[self] . identifier[_pool] . identifier[empty] ():
identifier[conn] = keyword[yield] keyword[from] identifier[self] . identifier[_pool] . identifier[get] ()
identifier[self] . identifier[_do_close] ( identifier[conn] ) | def clear(self):
"""Clear pool connections."""
while not self._pool.empty():
conn = (yield from self._pool.get())
self._do_close(conn) # depends on [control=['while'], data=[]] |
def add_membership(self, email, role, **attrs):
"""
Add a Membership to the project and returns a
:class:`Membership` resource.
:param email: email for :class:`Membership`
:param role: role for :class:`Membership`
:param attrs: role for :class:`Membership`
:param attrs: optional :class:`Membership` attributes
"""
return Memberships(self.requester).create(
self.id, email, role, **attrs
) | def function[add_membership, parameter[self, email, role]]:
constant[
Add a Membership to the project and returns a
:class:`Membership` resource.
:param email: email for :class:`Membership`
:param role: role for :class:`Membership`
:param attrs: role for :class:`Membership`
:param attrs: optional :class:`Membership` attributes
]
return[call[call[name[Memberships], parameter[name[self].requester]].create, parameter[name[self].id, name[email], name[role]]]] | keyword[def] identifier[add_membership] ( identifier[self] , identifier[email] , identifier[role] ,** identifier[attrs] ):
literal[string]
keyword[return] identifier[Memberships] ( identifier[self] . identifier[requester] ). identifier[create] (
identifier[self] . identifier[id] , identifier[email] , identifier[role] ,** identifier[attrs]
) | def add_membership(self, email, role, **attrs):
"""
Add a Membership to the project and returns a
:class:`Membership` resource.
:param email: email for :class:`Membership`
:param role: role for :class:`Membership`
:param attrs: role for :class:`Membership`
:param attrs: optional :class:`Membership` attributes
"""
return Memberships(self.requester).create(self.id, email, role, **attrs) |
def argmax(input_, multi=False):
"""
Returns index / key of the item with the largest value.
Args:
input_ (dict or list):
References:
http://stackoverflow.com/questions/16945518/python-argmin-argmax
Ignore:
list_ = np.random.rand(10000).tolist()
%timeit list_.index(max(list_))
%timeit max(enumerate(list_), key=operator.itemgetter(1))[0]
%timeit max(enumerate(list_), key=lambda x: x[1])[0]
%timeit max(range(len(list_)), key=list_.__getitem__)
input_ = dict_
list_ = np.random.rand(100000).tolist()
dict_ = {str(ut.random_uuid()): x for x in list_}
%timeit list(input_.keys())[ut.argmax(list(input_.values()))]
%timeit max(input_.items(), key=operator.itemgetter(1))[0]
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_list import *
>>> import utool as ut
>>> input_ = [1, 2, 3, 3, 2, 3, 2, 1]
>>> ut.argmax(input_, multi=True)
>>> input_ = {1: 4, 2: 2, 3: 3, 3: 4}
>>> ut.argmax(input_, multi=True)
"""
if multi:
if isinstance(input_, dict):
keys = list(input_.keys())
values = list(input_.values())
return [keys[idx] for idx in argmax(values, multi=multi)]
else:
return where(equal([max(input_)], input_))
else:
if isinstance(input_, dict):
# its crazy, but this is faster
# max(input_.items(), key=operator.itemgetter(1))[0]
return list(input_.keys())[argmax(list(input_.values()))]
elif hasattr(input_, 'index'):
return input_.index(max(input_))
else:
return max(enumerate(input_), key=operator.itemgetter(1))[0] | def function[argmax, parameter[input_, multi]]:
constant[
Returns index / key of the item with the largest value.
Args:
input_ (dict or list):
References:
http://stackoverflow.com/questions/16945518/python-argmin-argmax
Ignore:
list_ = np.random.rand(10000).tolist()
%timeit list_.index(max(list_))
%timeit max(enumerate(list_), key=operator.itemgetter(1))[0]
%timeit max(enumerate(list_), key=lambda x: x[1])[0]
%timeit max(range(len(list_)), key=list_.__getitem__)
input_ = dict_
list_ = np.random.rand(100000).tolist()
dict_ = {str(ut.random_uuid()): x for x in list_}
%timeit list(input_.keys())[ut.argmax(list(input_.values()))]
%timeit max(input_.items(), key=operator.itemgetter(1))[0]
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_list import *
>>> import utool as ut
>>> input_ = [1, 2, 3, 3, 2, 3, 2, 1]
>>> ut.argmax(input_, multi=True)
>>> input_ = {1: 4, 2: 2, 3: 3, 3: 4}
>>> ut.argmax(input_, multi=True)
]
if name[multi] begin[:]
if call[name[isinstance], parameter[name[input_], name[dict]]] begin[:]
variable[keys] assign[=] call[name[list], parameter[call[name[input_].keys, parameter[]]]]
variable[values] assign[=] call[name[list], parameter[call[name[input_].values, parameter[]]]]
return[<ast.ListComp object at 0x7da1b24e6d40>] | keyword[def] identifier[argmax] ( identifier[input_] , identifier[multi] = keyword[False] ):
literal[string]
keyword[if] identifier[multi] :
keyword[if] identifier[isinstance] ( identifier[input_] , identifier[dict] ):
identifier[keys] = identifier[list] ( identifier[input_] . identifier[keys] ())
identifier[values] = identifier[list] ( identifier[input_] . identifier[values] ())
keyword[return] [ identifier[keys] [ identifier[idx] ] keyword[for] identifier[idx] keyword[in] identifier[argmax] ( identifier[values] , identifier[multi] = identifier[multi] )]
keyword[else] :
keyword[return] identifier[where] ( identifier[equal] ([ identifier[max] ( identifier[input_] )], identifier[input_] ))
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[input_] , identifier[dict] ):
keyword[return] identifier[list] ( identifier[input_] . identifier[keys] ())[ identifier[argmax] ( identifier[list] ( identifier[input_] . identifier[values] ()))]
keyword[elif] identifier[hasattr] ( identifier[input_] , literal[string] ):
keyword[return] identifier[input_] . identifier[index] ( identifier[max] ( identifier[input_] ))
keyword[else] :
keyword[return] identifier[max] ( identifier[enumerate] ( identifier[input_] ), identifier[key] = identifier[operator] . identifier[itemgetter] ( literal[int] ))[ literal[int] ] | def argmax(input_, multi=False):
"""
Returns index / key of the item with the largest value.
Args:
input_ (dict or list):
References:
http://stackoverflow.com/questions/16945518/python-argmin-argmax
Ignore:
list_ = np.random.rand(10000).tolist()
%timeit list_.index(max(list_))
%timeit max(enumerate(list_), key=operator.itemgetter(1))[0]
%timeit max(enumerate(list_), key=lambda x: x[1])[0]
%timeit max(range(len(list_)), key=list_.__getitem__)
input_ = dict_
list_ = np.random.rand(100000).tolist()
dict_ = {str(ut.random_uuid()): x for x in list_}
%timeit list(input_.keys())[ut.argmax(list(input_.values()))]
%timeit max(input_.items(), key=operator.itemgetter(1))[0]
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_list import *
>>> import utool as ut
>>> input_ = [1, 2, 3, 3, 2, 3, 2, 1]
>>> ut.argmax(input_, multi=True)
>>> input_ = {1: 4, 2: 2, 3: 3, 3: 4}
>>> ut.argmax(input_, multi=True)
"""
if multi:
if isinstance(input_, dict):
keys = list(input_.keys())
values = list(input_.values())
return [keys[idx] for idx in argmax(values, multi=multi)] # depends on [control=['if'], data=[]]
else:
return where(equal([max(input_)], input_)) # depends on [control=['if'], data=[]]
elif isinstance(input_, dict):
# its crazy, but this is faster
# max(input_.items(), key=operator.itemgetter(1))[0]
return list(input_.keys())[argmax(list(input_.values()))] # depends on [control=['if'], data=[]]
elif hasattr(input_, 'index'):
return input_.index(max(input_)) # depends on [control=['if'], data=[]]
else:
return max(enumerate(input_), key=operator.itemgetter(1))[0] |
def __get_isbn(self, html):
"""
从图书借阅状态页面中获取isbn
:param html:
:return:
"""
import re
reg = re.compile(r'getBookCover\(".*","(.*)"\);')
res = reg.findall(html)
if len(res) > 0:
return res[0]
else:
return '' | def function[__get_isbn, parameter[self, html]]:
constant[
从图书借阅状态页面中获取isbn
:param html:
:return:
]
import module[re]
variable[reg] assign[=] call[name[re].compile, parameter[constant[getBookCover\(".*","(.*)"\);]]]
variable[res] assign[=] call[name[reg].findall, parameter[name[html]]]
if compare[call[name[len], parameter[name[res]]] greater[>] constant[0]] begin[:]
return[call[name[res]][constant[0]]] | keyword[def] identifier[__get_isbn] ( identifier[self] , identifier[html] ):
literal[string]
keyword[import] identifier[re]
identifier[reg] = identifier[re] . identifier[compile] ( literal[string] )
identifier[res] = identifier[reg] . identifier[findall] ( identifier[html] )
keyword[if] identifier[len] ( identifier[res] )> literal[int] :
keyword[return] identifier[res] [ literal[int] ]
keyword[else] :
keyword[return] literal[string] | def __get_isbn(self, html):
"""
从图书借阅状态页面中获取isbn
:param html:
:return:
"""
import re
reg = re.compile('getBookCover\\(".*","(.*)"\\);')
res = reg.findall(html)
if len(res) > 0:
return res[0] # depends on [control=['if'], data=[]]
else:
return '' |
def lpc(y, order):
"""Linear Prediction Coefficients via Burg's method
This function applies Burg's method to estimate coefficients of a linear
filter on `y` of order `order`. Burg's method is an extension to the
Yule-Walker approach, which are both sometimes referred to as LPC parameter
estimation by autocorrelation.
It follows the description and implementation approach described in the
introduction in [1]_. N.B. This paper describes a different method, which
is not implemented here, but has been chosen for its clear explanation of
Burg's technique in its introduction.
.. [1] Larry Marple
A New Autoregressive Spectrum Analysis Algorithm
IEEE Transactions on Accoustics, Speech, and Signal Processing
vol 28, no. 4, 1980
Parameters
----------
y : np.ndarray
Time series to fit
order : int > 0
Order of the linear filter
Returns
-------
a : np.ndarray of length order + 1
LP prediction error coefficients, i.e. filter denominator polynomial
Raises
------
ParameterError
- If y is not valid audio as per `util.valid_audio`
- If order < 1 or not integer
FloatingPointError
- If y is ill-conditioned
See also
--------
scipy.signal.lfilter
Examples
--------
Compute LP coefficients of y at order 16 on entire series
>>> y, sr = librosa.load(librosa.util.example_audio_file(), offset=30,
... duration=10)
>>> librosa.lpc(y, 16)
Compute LP coefficients, and plot LP estimate of original series
>>> import matplotlib.pyplot as plt
>>> import scipy
>>> y, sr = librosa.load(librosa.util.example_audio_file(), offset=30,
... duration=0.020)
>>> a = librosa.lpc(y, 2)
>>> y_hat = scipy.signal.lfilter([0] + -1*a[1:], [1], y)
>>> plt.figure()
>>> plt.plot(y)
>>> plt.plot(y_hat)
>>> plt.legend(['y', 'y_hat'])
>>> plt.title('LP Model Forward Prediction')
"""
if not isinstance(order, int) or order < 1:
raise ParameterError("order must be an integer > 0")
util.valid_audio(y, mono=True)
return __lpc(y, order) | def function[lpc, parameter[y, order]]:
constant[Linear Prediction Coefficients via Burg's method
This function applies Burg's method to estimate coefficients of a linear
filter on `y` of order `order`. Burg's method is an extension to the
Yule-Walker approach, which are both sometimes referred to as LPC parameter
estimation by autocorrelation.
It follows the description and implementation approach described in the
introduction in [1]_. N.B. This paper describes a different method, which
is not implemented here, but has been chosen for its clear explanation of
Burg's technique in its introduction.
.. [1] Larry Marple
A New Autoregressive Spectrum Analysis Algorithm
IEEE Transactions on Accoustics, Speech, and Signal Processing
vol 28, no. 4, 1980
Parameters
----------
y : np.ndarray
Time series to fit
order : int > 0
Order of the linear filter
Returns
-------
a : np.ndarray of length order + 1
LP prediction error coefficients, i.e. filter denominator polynomial
Raises
------
ParameterError
- If y is not valid audio as per `util.valid_audio`
- If order < 1 or not integer
FloatingPointError
- If y is ill-conditioned
See also
--------
scipy.signal.lfilter
Examples
--------
Compute LP coefficients of y at order 16 on entire series
>>> y, sr = librosa.load(librosa.util.example_audio_file(), offset=30,
... duration=10)
>>> librosa.lpc(y, 16)
Compute LP coefficients, and plot LP estimate of original series
>>> import matplotlib.pyplot as plt
>>> import scipy
>>> y, sr = librosa.load(librosa.util.example_audio_file(), offset=30,
... duration=0.020)
>>> a = librosa.lpc(y, 2)
>>> y_hat = scipy.signal.lfilter([0] + -1*a[1:], [1], y)
>>> plt.figure()
>>> plt.plot(y)
>>> plt.plot(y_hat)
>>> plt.legend(['y', 'y_hat'])
>>> plt.title('LP Model Forward Prediction')
]
if <ast.BoolOp object at 0x7da1b055c5e0> begin[:]
<ast.Raise object at 0x7da1b055d3f0>
call[name[util].valid_audio, parameter[name[y]]]
return[call[name[__lpc], parameter[name[y], name[order]]]] | keyword[def] identifier[lpc] ( identifier[y] , identifier[order] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[order] , identifier[int] ) keyword[or] identifier[order] < literal[int] :
keyword[raise] identifier[ParameterError] ( literal[string] )
identifier[util] . identifier[valid_audio] ( identifier[y] , identifier[mono] = keyword[True] )
keyword[return] identifier[__lpc] ( identifier[y] , identifier[order] ) | def lpc(y, order):
"""Linear Prediction Coefficients via Burg's method
This function applies Burg's method to estimate coefficients of a linear
filter on `y` of order `order`. Burg's method is an extension to the
Yule-Walker approach, which are both sometimes referred to as LPC parameter
estimation by autocorrelation.
It follows the description and implementation approach described in the
introduction in [1]_. N.B. This paper describes a different method, which
is not implemented here, but has been chosen for its clear explanation of
Burg's technique in its introduction.
.. [1] Larry Marple
A New Autoregressive Spectrum Analysis Algorithm
IEEE Transactions on Accoustics, Speech, and Signal Processing
vol 28, no. 4, 1980
Parameters
----------
y : np.ndarray
Time series to fit
order : int > 0
Order of the linear filter
Returns
-------
a : np.ndarray of length order + 1
LP prediction error coefficients, i.e. filter denominator polynomial
Raises
------
ParameterError
- If y is not valid audio as per `util.valid_audio`
- If order < 1 or not integer
FloatingPointError
- If y is ill-conditioned
See also
--------
scipy.signal.lfilter
Examples
--------
Compute LP coefficients of y at order 16 on entire series
>>> y, sr = librosa.load(librosa.util.example_audio_file(), offset=30,
... duration=10)
>>> librosa.lpc(y, 16)
Compute LP coefficients, and plot LP estimate of original series
>>> import matplotlib.pyplot as plt
>>> import scipy
>>> y, sr = librosa.load(librosa.util.example_audio_file(), offset=30,
... duration=0.020)
>>> a = librosa.lpc(y, 2)
>>> y_hat = scipy.signal.lfilter([0] + -1*a[1:], [1], y)
>>> plt.figure()
>>> plt.plot(y)
>>> plt.plot(y_hat)
>>> plt.legend(['y', 'y_hat'])
>>> plt.title('LP Model Forward Prediction')
"""
if not isinstance(order, int) or order < 1:
raise ParameterError('order must be an integer > 0') # depends on [control=['if'], data=[]]
util.valid_audio(y, mono=True)
return __lpc(y, order) |
def add_update_callback(self, group=None, name=None, cb=None):
"""
Add a callback for a specific parameter name. This callback will be
executed when a new value is read from the Crazyflie.
"""
if not group and not name:
self.all_update_callback.add_callback(cb)
elif not name:
if group not in self.group_update_callbacks:
self.group_update_callbacks[group] = Caller()
self.group_update_callbacks[group].add_callback(cb)
else:
paramname = '{}.{}'.format(group, name)
if paramname not in self.param_update_callbacks:
self.param_update_callbacks[paramname] = Caller()
self.param_update_callbacks[paramname].add_callback(cb) | def function[add_update_callback, parameter[self, group, name, cb]]:
constant[
Add a callback for a specific parameter name. This callback will be
executed when a new value is read from the Crazyflie.
]
if <ast.BoolOp object at 0x7da1b16b1ea0> begin[:]
call[name[self].all_update_callback.add_callback, parameter[name[cb]]] | keyword[def] identifier[add_update_callback] ( identifier[self] , identifier[group] = keyword[None] , identifier[name] = keyword[None] , identifier[cb] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[group] keyword[and] keyword[not] identifier[name] :
identifier[self] . identifier[all_update_callback] . identifier[add_callback] ( identifier[cb] )
keyword[elif] keyword[not] identifier[name] :
keyword[if] identifier[group] keyword[not] keyword[in] identifier[self] . identifier[group_update_callbacks] :
identifier[self] . identifier[group_update_callbacks] [ identifier[group] ]= identifier[Caller] ()
identifier[self] . identifier[group_update_callbacks] [ identifier[group] ]. identifier[add_callback] ( identifier[cb] )
keyword[else] :
identifier[paramname] = literal[string] . identifier[format] ( identifier[group] , identifier[name] )
keyword[if] identifier[paramname] keyword[not] keyword[in] identifier[self] . identifier[param_update_callbacks] :
identifier[self] . identifier[param_update_callbacks] [ identifier[paramname] ]= identifier[Caller] ()
identifier[self] . identifier[param_update_callbacks] [ identifier[paramname] ]. identifier[add_callback] ( identifier[cb] ) | def add_update_callback(self, group=None, name=None, cb=None):
"""
Add a callback for a specific parameter name. This callback will be
executed when a new value is read from the Crazyflie.
"""
if not group and (not name):
self.all_update_callback.add_callback(cb) # depends on [control=['if'], data=[]]
elif not name:
if group not in self.group_update_callbacks:
self.group_update_callbacks[group] = Caller() # depends on [control=['if'], data=['group']]
self.group_update_callbacks[group].add_callback(cb) # depends on [control=['if'], data=[]]
else:
paramname = '{}.{}'.format(group, name)
if paramname not in self.param_update_callbacks:
self.param_update_callbacks[paramname] = Caller() # depends on [control=['if'], data=['paramname']]
self.param_update_callbacks[paramname].add_callback(cb) |
def tile(self, z, x, y):
"""
Download the specified tile from `tiles_url`
"""
logger.debug(_("Download tile %s") % ((z, x, y),))
# Render each keyword in URL ({s}, {x}, {y}, {z}, {size} ... )
size = self.tilesize
s = self.tiles_subdomains[(x + y) % len(self.tiles_subdomains)];
try:
url = self.tiles_url.format(**locals())
except KeyError as e:
raise DownloadError(_("Unknown keyword %s in URL") % e)
logger.debug(_("Retrieve tile at %s") % url)
r = DOWNLOAD_RETRIES
sleeptime = 1
while r > 0:
try:
request = requests.get(url, headers=self.headers)
if request.status_code == 200:
return request.content
raise DownloadError(_("Status code : %s, url : %s") % (request.status_code, url))
except requests.exceptions.ConnectionError as e:
logger.debug(_("Download error, retry (%s left). (%s)") % (r, e))
r -= 1
time.sleep(sleeptime)
# progressivly sleep longer to wait for this tile
if (sleeptime <= 10) and (r % 2 == 0):
sleeptime += 1 # increase wait
raise DownloadError(_("Cannot download URL %s") % url) | def function[tile, parameter[self, z, x, y]]:
constant[
Download the specified tile from `tiles_url`
]
call[name[logger].debug, parameter[binary_operation[call[name[_], parameter[constant[Download tile %s]]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Tuple object at 0x7da207f00f10>]]]]]
variable[size] assign[=] name[self].tilesize
variable[s] assign[=] call[name[self].tiles_subdomains][binary_operation[binary_operation[name[x] + name[y]] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[self].tiles_subdomains]]]]
<ast.Try object at 0x7da207f02e30>
call[name[logger].debug, parameter[binary_operation[call[name[_], parameter[constant[Retrieve tile at %s]]] <ast.Mod object at 0x7da2590d6920> name[url]]]]
variable[r] assign[=] name[DOWNLOAD_RETRIES]
variable[sleeptime] assign[=] constant[1]
while compare[name[r] greater[>] constant[0]] begin[:]
<ast.Try object at 0x7da207f01360>
<ast.Raise object at 0x7da207f00b50> | keyword[def] identifier[tile] ( identifier[self] , identifier[z] , identifier[x] , identifier[y] ):
literal[string]
identifier[logger] . identifier[debug] ( identifier[_] ( literal[string] )%(( identifier[z] , identifier[x] , identifier[y] ),))
identifier[size] = identifier[self] . identifier[tilesize]
identifier[s] = identifier[self] . identifier[tiles_subdomains] [( identifier[x] + identifier[y] )% identifier[len] ( identifier[self] . identifier[tiles_subdomains] )];
keyword[try] :
identifier[url] = identifier[self] . identifier[tiles_url] . identifier[format] (** identifier[locals] ())
keyword[except] identifier[KeyError] keyword[as] identifier[e] :
keyword[raise] identifier[DownloadError] ( identifier[_] ( literal[string] )% identifier[e] )
identifier[logger] . identifier[debug] ( identifier[_] ( literal[string] )% identifier[url] )
identifier[r] = identifier[DOWNLOAD_RETRIES]
identifier[sleeptime] = literal[int]
keyword[while] identifier[r] > literal[int] :
keyword[try] :
identifier[request] = identifier[requests] . identifier[get] ( identifier[url] , identifier[headers] = identifier[self] . identifier[headers] )
keyword[if] identifier[request] . identifier[status_code] == literal[int] :
keyword[return] identifier[request] . identifier[content]
keyword[raise] identifier[DownloadError] ( identifier[_] ( literal[string] )%( identifier[request] . identifier[status_code] , identifier[url] ))
keyword[except] identifier[requests] . identifier[exceptions] . identifier[ConnectionError] keyword[as] identifier[e] :
identifier[logger] . identifier[debug] ( identifier[_] ( literal[string] )%( identifier[r] , identifier[e] ))
identifier[r] -= literal[int]
identifier[time] . identifier[sleep] ( identifier[sleeptime] )
keyword[if] ( identifier[sleeptime] <= literal[int] ) keyword[and] ( identifier[r] % literal[int] == literal[int] ):
identifier[sleeptime] += literal[int]
keyword[raise] identifier[DownloadError] ( identifier[_] ( literal[string] )% identifier[url] ) | def tile(self, z, x, y):
"""
Download the specified tile from `tiles_url`
"""
logger.debug(_('Download tile %s') % ((z, x, y),))
# Render each keyword in URL ({s}, {x}, {y}, {z}, {size} ... )
size = self.tilesize
s = self.tiles_subdomains[(x + y) % len(self.tiles_subdomains)]
try:
url = self.tiles_url.format(**locals()) # depends on [control=['try'], data=[]]
except KeyError as e:
raise DownloadError(_('Unknown keyword %s in URL') % e) # depends on [control=['except'], data=['e']]
logger.debug(_('Retrieve tile at %s') % url)
r = DOWNLOAD_RETRIES
sleeptime = 1
while r > 0:
try:
request = requests.get(url, headers=self.headers)
if request.status_code == 200:
return request.content # depends on [control=['if'], data=[]]
raise DownloadError(_('Status code : %s, url : %s') % (request.status_code, url)) # depends on [control=['try'], data=[]]
except requests.exceptions.ConnectionError as e:
logger.debug(_('Download error, retry (%s left). (%s)') % (r, e))
r -= 1
time.sleep(sleeptime)
# progressivly sleep longer to wait for this tile
if sleeptime <= 10 and r % 2 == 0:
sleeptime += 1 # increase wait # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=['r']]
raise DownloadError(_('Cannot download URL %s') % url) |
def calczmax(self):
"""
NAME:
calczmax
PURPOSE:
calculate the maximum height
INPUT:
OUTPUT:
zmax
HISTORY:
2012-06-01 - Written - Bovy (IAS)
"""
if hasattr(self,'_zmax'): #pragma: no cover
return self._zmax
Ez= calcEz(self._z,self._vz,self._verticalpot)
if self._vz == 0.: #We are exactly at the maximum height
zmax= nu.fabs(self._z)
else:
zstart= self._z
try:
zend= _zmaxFindStart(self._z,Ez,self._verticalpot)
except OverflowError: #pragma: no cover
zmax= -9999.99
else:
zmax= optimize.brentq(_zmaxEq,zstart,zend,
(Ez,self._verticalpot))
self._zmax= zmax
return self._zmax | def function[calczmax, parameter[self]]:
constant[
NAME:
calczmax
PURPOSE:
calculate the maximum height
INPUT:
OUTPUT:
zmax
HISTORY:
2012-06-01 - Written - Bovy (IAS)
]
if call[name[hasattr], parameter[name[self], constant[_zmax]]] begin[:]
return[name[self]._zmax]
variable[Ez] assign[=] call[name[calcEz], parameter[name[self]._z, name[self]._vz, name[self]._verticalpot]]
if compare[name[self]._vz equal[==] constant[0.0]] begin[:]
variable[zmax] assign[=] call[name[nu].fabs, parameter[name[self]._z]]
name[self]._zmax assign[=] name[zmax]
return[name[self]._zmax] | keyword[def] identifier[calczmax] ( identifier[self] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[return] identifier[self] . identifier[_zmax]
identifier[Ez] = identifier[calcEz] ( identifier[self] . identifier[_z] , identifier[self] . identifier[_vz] , identifier[self] . identifier[_verticalpot] )
keyword[if] identifier[self] . identifier[_vz] == literal[int] :
identifier[zmax] = identifier[nu] . identifier[fabs] ( identifier[self] . identifier[_z] )
keyword[else] :
identifier[zstart] = identifier[self] . identifier[_z]
keyword[try] :
identifier[zend] = identifier[_zmaxFindStart] ( identifier[self] . identifier[_z] , identifier[Ez] , identifier[self] . identifier[_verticalpot] )
keyword[except] identifier[OverflowError] :
identifier[zmax] =- literal[int]
keyword[else] :
identifier[zmax] = identifier[optimize] . identifier[brentq] ( identifier[_zmaxEq] , identifier[zstart] , identifier[zend] ,
( identifier[Ez] , identifier[self] . identifier[_verticalpot] ))
identifier[self] . identifier[_zmax] = identifier[zmax]
keyword[return] identifier[self] . identifier[_zmax] | def calczmax(self):
"""
NAME:
calczmax
PURPOSE:
calculate the maximum height
INPUT:
OUTPUT:
zmax
HISTORY:
2012-06-01 - Written - Bovy (IAS)
"""
if hasattr(self, '_zmax'): #pragma: no cover
return self._zmax # depends on [control=['if'], data=[]]
Ez = calcEz(self._z, self._vz, self._verticalpot)
if self._vz == 0.0: #We are exactly at the maximum height
zmax = nu.fabs(self._z) # depends on [control=['if'], data=[]]
else:
zstart = self._z
try:
zend = _zmaxFindStart(self._z, Ez, self._verticalpot) # depends on [control=['try'], data=[]]
except OverflowError: #pragma: no cover
zmax = -9999.99 # depends on [control=['except'], data=[]]
else:
zmax = optimize.brentq(_zmaxEq, zstart, zend, (Ez, self._verticalpot))
self._zmax = zmax
return self._zmax |
def GetClientsForHash(cls, hash_obj, token=None, age=aff4.NEWEST_TIME):
"""Yields client_files for the specified file store hash.
Args:
hash_obj: RDFURN that we want to get hits for.
token: Security token.
age: AFF4 age specification. Only get hits corresponding to the given age
spec. Should be aff4.NEWEST_TIME or a time range given as a tuple
(start, end) in microseconds since Jan 1st, 1970. If just a microseconds
value is given it's treated as the higher end of the range, i.e. (0,
age). See aff4.FACTORY.ParseAgeSpecification for details.
Yields:
RDFURNs corresponding to a client file that has the hash.
Raises:
ValueError: if age was set to aff4.ALL_TIMES.
"""
if age == aff4.ALL_TIMES:
raise ValueError("age==aff4.ALL_TIMES is not supported.")
results = cls.GetClientsForHashes([hash_obj], token=token, age=age)
for _, client_files in results:
for client_file in client_files:
yield client_file | def function[GetClientsForHash, parameter[cls, hash_obj, token, age]]:
constant[Yields client_files for the specified file store hash.
Args:
hash_obj: RDFURN that we want to get hits for.
token: Security token.
age: AFF4 age specification. Only get hits corresponding to the given age
spec. Should be aff4.NEWEST_TIME or a time range given as a tuple
(start, end) in microseconds since Jan 1st, 1970. If just a microseconds
value is given it's treated as the higher end of the range, i.e. (0,
age). See aff4.FACTORY.ParseAgeSpecification for details.
Yields:
RDFURNs corresponding to a client file that has the hash.
Raises:
ValueError: if age was set to aff4.ALL_TIMES.
]
if compare[name[age] equal[==] name[aff4].ALL_TIMES] begin[:]
<ast.Raise object at 0x7da1b1b0d2a0>
variable[results] assign[=] call[name[cls].GetClientsForHashes, parameter[list[[<ast.Name object at 0x7da1b1c258d0>]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1b44160>, <ast.Name object at 0x7da1b1b444c0>]]] in starred[name[results]] begin[:]
for taget[name[client_file]] in starred[name[client_files]] begin[:]
<ast.Yield object at 0x7da1b1b45f00> | keyword[def] identifier[GetClientsForHash] ( identifier[cls] , identifier[hash_obj] , identifier[token] = keyword[None] , identifier[age] = identifier[aff4] . identifier[NEWEST_TIME] ):
literal[string]
keyword[if] identifier[age] == identifier[aff4] . identifier[ALL_TIMES] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[results] = identifier[cls] . identifier[GetClientsForHashes] ([ identifier[hash_obj] ], identifier[token] = identifier[token] , identifier[age] = identifier[age] )
keyword[for] identifier[_] , identifier[client_files] keyword[in] identifier[results] :
keyword[for] identifier[client_file] keyword[in] identifier[client_files] :
keyword[yield] identifier[client_file] | def GetClientsForHash(cls, hash_obj, token=None, age=aff4.NEWEST_TIME):
"""Yields client_files for the specified file store hash.
Args:
hash_obj: RDFURN that we want to get hits for.
token: Security token.
age: AFF4 age specification. Only get hits corresponding to the given age
spec. Should be aff4.NEWEST_TIME or a time range given as a tuple
(start, end) in microseconds since Jan 1st, 1970. If just a microseconds
value is given it's treated as the higher end of the range, i.e. (0,
age). See aff4.FACTORY.ParseAgeSpecification for details.
Yields:
RDFURNs corresponding to a client file that has the hash.
Raises:
ValueError: if age was set to aff4.ALL_TIMES.
"""
if age == aff4.ALL_TIMES:
raise ValueError('age==aff4.ALL_TIMES is not supported.') # depends on [control=['if'], data=[]]
results = cls.GetClientsForHashes([hash_obj], token=token, age=age)
for (_, client_files) in results:
for client_file in client_files:
yield client_file # depends on [control=['for'], data=['client_file']] # depends on [control=['for'], data=[]] |
def handle_incoming_message(self, msg):
"""
Start or cancel a job, based on the msg.
If msg.type == MessageType.START_JOB, then start the job given by msg.job.
If msg.type == MessageType.CANCEL_JOB, then try to cancel the job given by msg.job.job_id.
Args:
msg (barbequeue.messaging.classes.Message):
Returns: None
"""
if msg.type == MessageType.START_JOB:
job = msg.message['job']
self.schedule_job(job)
elif msg.type == MessageType.CANCEL_JOB:
job_id = msg.message['job_id']
self.cancel(job_id) | def function[handle_incoming_message, parameter[self, msg]]:
constant[
Start or cancel a job, based on the msg.
If msg.type == MessageType.START_JOB, then start the job given by msg.job.
If msg.type == MessageType.CANCEL_JOB, then try to cancel the job given by msg.job.job_id.
Args:
msg (barbequeue.messaging.classes.Message):
Returns: None
]
if compare[name[msg].type equal[==] name[MessageType].START_JOB] begin[:]
variable[job] assign[=] call[name[msg].message][constant[job]]
call[name[self].schedule_job, parameter[name[job]]] | keyword[def] identifier[handle_incoming_message] ( identifier[self] , identifier[msg] ):
literal[string]
keyword[if] identifier[msg] . identifier[type] == identifier[MessageType] . identifier[START_JOB] :
identifier[job] = identifier[msg] . identifier[message] [ literal[string] ]
identifier[self] . identifier[schedule_job] ( identifier[job] )
keyword[elif] identifier[msg] . identifier[type] == identifier[MessageType] . identifier[CANCEL_JOB] :
identifier[job_id] = identifier[msg] . identifier[message] [ literal[string] ]
identifier[self] . identifier[cancel] ( identifier[job_id] ) | def handle_incoming_message(self, msg):
"""
Start or cancel a job, based on the msg.
If msg.type == MessageType.START_JOB, then start the job given by msg.job.
If msg.type == MessageType.CANCEL_JOB, then try to cancel the job given by msg.job.job_id.
Args:
msg (barbequeue.messaging.classes.Message):
Returns: None
"""
if msg.type == MessageType.START_JOB:
job = msg.message['job']
self.schedule_job(job) # depends on [control=['if'], data=[]]
elif msg.type == MessageType.CANCEL_JOB:
job_id = msg.message['job_id']
self.cancel(job_id) # depends on [control=['if'], data=[]] |
def send_response(self, body, set_content_type=True):
"""
Serialize and send ``body`` in the response.
:param dict body: the body to serialize
:param bool set_content_type: should the :http:header:`Content-Type`
header be set? Defaults to :data:`True`
"""
settings = get_settings(self.application, force_instance=True)
handler = settings[self.get_response_content_type()]
content_type, data_bytes = handler.to_bytes(body)
if set_content_type:
self.set_header('Content-Type', content_type)
self.add_header('Vary', 'Accept')
self.write(data_bytes) | def function[send_response, parameter[self, body, set_content_type]]:
constant[
Serialize and send ``body`` in the response.
:param dict body: the body to serialize
:param bool set_content_type: should the :http:header:`Content-Type`
header be set? Defaults to :data:`True`
]
variable[settings] assign[=] call[name[get_settings], parameter[name[self].application]]
variable[handler] assign[=] call[name[settings]][call[name[self].get_response_content_type, parameter[]]]
<ast.Tuple object at 0x7da204565600> assign[=] call[name[handler].to_bytes, parameter[name[body]]]
if name[set_content_type] begin[:]
call[name[self].set_header, parameter[constant[Content-Type], name[content_type]]]
call[name[self].add_header, parameter[constant[Vary], constant[Accept]]]
call[name[self].write, parameter[name[data_bytes]]] | keyword[def] identifier[send_response] ( identifier[self] , identifier[body] , identifier[set_content_type] = keyword[True] ):
literal[string]
identifier[settings] = identifier[get_settings] ( identifier[self] . identifier[application] , identifier[force_instance] = keyword[True] )
identifier[handler] = identifier[settings] [ identifier[self] . identifier[get_response_content_type] ()]
identifier[content_type] , identifier[data_bytes] = identifier[handler] . identifier[to_bytes] ( identifier[body] )
keyword[if] identifier[set_content_type] :
identifier[self] . identifier[set_header] ( literal[string] , identifier[content_type] )
identifier[self] . identifier[add_header] ( literal[string] , literal[string] )
identifier[self] . identifier[write] ( identifier[data_bytes] ) | def send_response(self, body, set_content_type=True):
"""
Serialize and send ``body`` in the response.
:param dict body: the body to serialize
:param bool set_content_type: should the :http:header:`Content-Type`
header be set? Defaults to :data:`True`
"""
settings = get_settings(self.application, force_instance=True)
handler = settings[self.get_response_content_type()]
(content_type, data_bytes) = handler.to_bytes(body)
if set_content_type:
self.set_header('Content-Type', content_type)
self.add_header('Vary', 'Accept') # depends on [control=['if'], data=[]]
self.write(data_bytes) |
def get_compute(self, compute=None, **kwargs):
"""
Filter in the 'compute' context
:parameter str compute: name of the compute options (optional)
:parameter **kwargs: any other tags to do the filter
(except compute or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
"""
if compute is not None:
kwargs['compute'] = compute
kwargs['context'] = 'compute'
return self.filter(**kwargs) | def function[get_compute, parameter[self, compute]]:
constant[
Filter in the 'compute' context
:parameter str compute: name of the compute options (optional)
:parameter **kwargs: any other tags to do the filter
(except compute or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
]
if compare[name[compute] is_not constant[None]] begin[:]
call[name[kwargs]][constant[compute]] assign[=] name[compute]
call[name[kwargs]][constant[context]] assign[=] constant[compute]
return[call[name[self].filter, parameter[]]] | keyword[def] identifier[get_compute] ( identifier[self] , identifier[compute] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[compute] keyword[is] keyword[not] keyword[None] :
identifier[kwargs] [ literal[string] ]= identifier[compute]
identifier[kwargs] [ literal[string] ]= literal[string]
keyword[return] identifier[self] . identifier[filter] (** identifier[kwargs] ) | def get_compute(self, compute=None, **kwargs):
"""
Filter in the 'compute' context
:parameter str compute: name of the compute options (optional)
:parameter **kwargs: any other tags to do the filter
(except compute or context)
:return: :class:`phoebe.parameters.parameters.ParameterSet`
"""
if compute is not None:
kwargs['compute'] = compute # depends on [control=['if'], data=['compute']]
kwargs['context'] = 'compute'
return self.filter(**kwargs) |
def expandranges(parlist):
"""Rewrite a list of parameters by expanding ranges (e.g., log10_efac{1-10}) into
individual parameters."""
ret = []
for par in parlist:
# match anything of the form XXX{number1-number2}
m = re.match('(.*)\{([0-9]+)\-([0-9]+)\}',par)
if m is None:
ret.append(par)
else:
# (these are strings)
root, number1, number2 = m.group(1), m.group(2), m.group(3)
# if number1 begins with 0s, number parameters as 00, 01, 02, ...,
# otherwise go with 0, 1, 2, ...
fmt = '{{0}}{{1:0{0}d}}'.format(len(number1)) if number1[0] == '0' else '{0}{1:d}'
ret = ret + [fmt.format(root,i) for i in range(int(m.group(2)),int(m.group(3))+1)]
return ret | def function[expandranges, parameter[parlist]]:
constant[Rewrite a list of parameters by expanding ranges (e.g., log10_efac{1-10}) into
individual parameters.]
variable[ret] assign[=] list[[]]
for taget[name[par]] in starred[name[parlist]] begin[:]
variable[m] assign[=] call[name[re].match, parameter[constant[(.*)\{([0-9]+)\-([0-9]+)\}], name[par]]]
if compare[name[m] is constant[None]] begin[:]
call[name[ret].append, parameter[name[par]]]
return[name[ret]] | keyword[def] identifier[expandranges] ( identifier[parlist] ):
literal[string]
identifier[ret] =[]
keyword[for] identifier[par] keyword[in] identifier[parlist] :
identifier[m] = identifier[re] . identifier[match] ( literal[string] , identifier[par] )
keyword[if] identifier[m] keyword[is] keyword[None] :
identifier[ret] . identifier[append] ( identifier[par] )
keyword[else] :
identifier[root] , identifier[number1] , identifier[number2] = identifier[m] . identifier[group] ( literal[int] ), identifier[m] . identifier[group] ( literal[int] ), identifier[m] . identifier[group] ( literal[int] )
identifier[fmt] = literal[string] . identifier[format] ( identifier[len] ( identifier[number1] )) keyword[if] identifier[number1] [ literal[int] ]== literal[string] keyword[else] literal[string]
identifier[ret] = identifier[ret] +[ identifier[fmt] . identifier[format] ( identifier[root] , identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[int] ( identifier[m] . identifier[group] ( literal[int] )), identifier[int] ( identifier[m] . identifier[group] ( literal[int] ))+ literal[int] )]
keyword[return] identifier[ret] | def expandranges(parlist):
"""Rewrite a list of parameters by expanding ranges (e.g., log10_efac{1-10}) into
individual parameters."""
ret = []
for par in parlist:
# match anything of the form XXX{number1-number2}
m = re.match('(.*)\\{([0-9]+)\\-([0-9]+)\\}', par)
if m is None:
ret.append(par) # depends on [control=['if'], data=[]]
else:
# (these are strings)
(root, number1, number2) = (m.group(1), m.group(2), m.group(3))
# if number1 begins with 0s, number parameters as 00, 01, 02, ...,
# otherwise go with 0, 1, 2, ...
fmt = '{{0}}{{1:0{0}d}}'.format(len(number1)) if number1[0] == '0' else '{0}{1:d}'
ret = ret + [fmt.format(root, i) for i in range(int(m.group(2)), int(m.group(3)) + 1)] # depends on [control=['for'], data=['par']]
return ret |
def issue(self, issue_id):
"""Get the information of the given issue.
:param issue_id: issue identifier
"""
resource = urijoin(self.RISSUES, str(issue_id) + self.CJSON)
params = {
self.PINCLUDE: ','.join([self.CATTACHMENTS, self.CCHANGESETS,
self.CCHILDREN, self.CJOURNALS,
self.CRELATIONS, self.CWATCHERS])
}
response = self._call(resource, params)
return response | def function[issue, parameter[self, issue_id]]:
constant[Get the information of the given issue.
:param issue_id: issue identifier
]
variable[resource] assign[=] call[name[urijoin], parameter[name[self].RISSUES, binary_operation[call[name[str], parameter[name[issue_id]]] + name[self].CJSON]]]
variable[params] assign[=] dictionary[[<ast.Attribute object at 0x7da1b020f4f0>], [<ast.Call object at 0x7da1b020e3e0>]]
variable[response] assign[=] call[name[self]._call, parameter[name[resource], name[params]]]
return[name[response]] | keyword[def] identifier[issue] ( identifier[self] , identifier[issue_id] ):
literal[string]
identifier[resource] = identifier[urijoin] ( identifier[self] . identifier[RISSUES] , identifier[str] ( identifier[issue_id] )+ identifier[self] . identifier[CJSON] )
identifier[params] ={
identifier[self] . identifier[PINCLUDE] : literal[string] . identifier[join] ([ identifier[self] . identifier[CATTACHMENTS] , identifier[self] . identifier[CCHANGESETS] ,
identifier[self] . identifier[CCHILDREN] , identifier[self] . identifier[CJOURNALS] ,
identifier[self] . identifier[CRELATIONS] , identifier[self] . identifier[CWATCHERS] ])
}
identifier[response] = identifier[self] . identifier[_call] ( identifier[resource] , identifier[params] )
keyword[return] identifier[response] | def issue(self, issue_id):
"""Get the information of the given issue.
:param issue_id: issue identifier
"""
resource = urijoin(self.RISSUES, str(issue_id) + self.CJSON)
params = {self.PINCLUDE: ','.join([self.CATTACHMENTS, self.CCHANGESETS, self.CCHILDREN, self.CJOURNALS, self.CRELATIONS, self.CWATCHERS])}
response = self._call(resource, params)
return response |
def operator_relocate(self, graph, solution, op_diff_round_digits, anim):
"""applies Relocate inter-route operator to solution
Takes every node from every route and calculates savings when inserted
into all possible positions in other routes. Insertion is done at
position with max. saving and procedure starts over again with newly
created graph as input. Stops when no improvement is found.
Args
----
graph: :networkx:`NetworkX Graph Obj< >`
A NetworkX graaph is used.
solution: BaseSolution
BaseSolution instance
op_diff_round_digits: float
Precision (floating point digits) for rounding route length differences.
*Details*: In some cases when an exchange is performed on two routes with one node each,
the difference between the both solutions (before and after the exchange) is not zero.
This is due to internal rounding errors of float type. So the loop won't break
(alternating between these two solutions), we need an additional criterion to avoid
this behaviour: A threshold to handle values very close to zero as if they were zero
(for a more detailed description of the matter see http://floating-point-gui.de or
https://docs.python.org/3.5/tutorial/floatingpoint.html)
anim: AnimationDing0
AnimationDing0 object
Returns
-------
LocalSearchSolution
A solution (LocalSearchSolution class)
Notes
-----
(Inner) Loop variables:
* i: node that is checked for possible moves (position in the route `tour`, not node name)
* j: node that precedes the insert position in target route (position in the route `target_tour`, not node name)
Todo
----
* Remove ugly nested loops, convert to more efficient matrix operations
"""
# shorter var names for loop
dm = graph._matrix
dn = graph._nodes
# Relocate: Search better solutions by checking possible node moves
while True:
length_diff_best = 0
for route in solution.routes():
# exclude origin routes with single high-demand nodes (Load Areas)
if len(route._nodes) == 1:
if solution._problem._is_aggregated[str(route._nodes[0])]:
continue
# create tour by adding depot at start and end
tour = [graph._depot] + route._nodes + [graph._depot]
for target_route in solution.routes():
# exclude (origin+target) routes with single high-demand nodes (Load Areas)
if len(target_route._nodes) == 1:
if solution._problem._is_aggregated[str(target_route._nodes[0])]:
continue
target_tour = [graph._depot] + target_route._nodes + [graph._depot]
if route == target_route:
continue
n = len(route._nodes)
nt = len(target_route._nodes)+1
for i in range(0,n):
node = route._nodes[i]
for j in range(0,nt):
#target_node = target_route._nodes[j]
if target_route.can_allocate([node]):
length_diff = (-dm[dn[tour[i].name()]][dn[tour[i+1].name()]] -
dm[dn[tour[i+1].name()]][dn[tour[i+2].name()]] +
dm[dn[tour[i].name()]][dn[tour[i+2].name()]] +
dm[dn[target_tour[j].name()]][dn[tour[i+1].name()]] +
dm[dn[tour[i+1].name()]][dn[target_tour[j+1].name()]] -
dm[dn[target_tour[j].name()]][dn[target_tour[j+1].name()]])
if length_diff < length_diff_best:
length_diff_best = length_diff
node_best, target_route_best, j_best = node, target_route, j
if length_diff_best < 0:
# insert new node
target_route_best.insert([node_best], j_best)
# remove empty routes from solution
solution._routes = [route for route in solution._routes if route._nodes]
if anim is not None:
solution.draw_network(anim)
#print('Bessere Loesung gefunden:', node_best, target_node_best, target_route_best, length_diff_best)
# no improvement found
if round(length_diff_best, op_diff_round_digits) == 0:
break
return solution | def function[operator_relocate, parameter[self, graph, solution, op_diff_round_digits, anim]]:
constant[applies Relocate inter-route operator to solution
Takes every node from every route and calculates savings when inserted
into all possible positions in other routes. Insertion is done at
position with max. saving and procedure starts over again with newly
created graph as input. Stops when no improvement is found.
Args
----
graph: :networkx:`NetworkX Graph Obj< >`
A NetworkX graaph is used.
solution: BaseSolution
BaseSolution instance
op_diff_round_digits: float
Precision (floating point digits) for rounding route length differences.
*Details*: In some cases when an exchange is performed on two routes with one node each,
the difference between the both solutions (before and after the exchange) is not zero.
This is due to internal rounding errors of float type. So the loop won't break
(alternating between these two solutions), we need an additional criterion to avoid
this behaviour: A threshold to handle values very close to zero as if they were zero
(for a more detailed description of the matter see http://floating-point-gui.de or
https://docs.python.org/3.5/tutorial/floatingpoint.html)
anim: AnimationDing0
AnimationDing0 object
Returns
-------
LocalSearchSolution
A solution (LocalSearchSolution class)
Notes
-----
(Inner) Loop variables:
* i: node that is checked for possible moves (position in the route `tour`, not node name)
* j: node that precedes the insert position in target route (position in the route `target_tour`, not node name)
Todo
----
* Remove ugly nested loops, convert to more efficient matrix operations
]
variable[dm] assign[=] name[graph]._matrix
variable[dn] assign[=] name[graph]._nodes
while constant[True] begin[:]
variable[length_diff_best] assign[=] constant[0]
for taget[name[route]] in starred[call[name[solution].routes, parameter[]]] begin[:]
if compare[call[name[len], parameter[name[route]._nodes]] equal[==] constant[1]] begin[:]
if call[name[solution]._problem._is_aggregated][call[name[str], parameter[call[name[route]._nodes][constant[0]]]]] begin[:]
continue
variable[tour] assign[=] binary_operation[binary_operation[list[[<ast.Attribute object at 0x7da18f812f50>]] + name[route]._nodes] + list[[<ast.Attribute object at 0x7da18f813850>]]]
for taget[name[target_route]] in starred[call[name[solution].routes, parameter[]]] begin[:]
if compare[call[name[len], parameter[name[target_route]._nodes]] equal[==] constant[1]] begin[:]
if call[name[solution]._problem._is_aggregated][call[name[str], parameter[call[name[target_route]._nodes][constant[0]]]]] begin[:]
continue
variable[target_tour] assign[=] binary_operation[binary_operation[list[[<ast.Attribute object at 0x7da18f812740>]] + name[target_route]._nodes] + list[[<ast.Attribute object at 0x7da18f812560>]]]
if compare[name[route] equal[==] name[target_route]] begin[:]
continue
variable[n] assign[=] call[name[len], parameter[name[route]._nodes]]
variable[nt] assign[=] binary_operation[call[name[len], parameter[name[target_route]._nodes]] + constant[1]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[n]]]] begin[:]
variable[node] assign[=] call[name[route]._nodes][name[i]]
for taget[name[j]] in starred[call[name[range], parameter[constant[0], name[nt]]]] begin[:]
if call[name[target_route].can_allocate, parameter[list[[<ast.Name object at 0x7da18f813a30>]]]] begin[:]
variable[length_diff] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18f8134c0> - call[call[name[dm]][call[name[dn]][call[call[name[tour]][binary_operation[name[i] + constant[1]]].name, parameter[]]]]][call[name[dn]][call[call[name[tour]][binary_operation[name[i] + constant[2]]].name, parameter[]]]]] + call[call[name[dm]][call[name[dn]][call[call[name[tour]][name[i]].name, parameter[]]]]][call[name[dn]][call[call[name[tour]][binary_operation[name[i] + constant[2]]].name, parameter[]]]]] + call[call[name[dm]][call[name[dn]][call[call[name[target_tour]][name[j]].name, parameter[]]]]][call[name[dn]][call[call[name[tour]][binary_operation[name[i] + constant[1]]].name, parameter[]]]]] + call[call[name[dm]][call[name[dn]][call[call[name[tour]][binary_operation[name[i] + constant[1]]].name, parameter[]]]]][call[name[dn]][call[call[name[target_tour]][binary_operation[name[j] + constant[1]]].name, parameter[]]]]] - call[call[name[dm]][call[name[dn]][call[call[name[target_tour]][name[j]].name, parameter[]]]]][call[name[dn]][call[call[name[target_tour]][binary_operation[name[j] + constant[1]]].name, parameter[]]]]]
if compare[name[length_diff] less[<] name[length_diff_best]] begin[:]
variable[length_diff_best] assign[=] name[length_diff]
<ast.Tuple object at 0x7da18f813880> assign[=] tuple[[<ast.Name object at 0x7da18f812a70>, <ast.Name object at 0x7da18f810100>, <ast.Name object at 0x7da18f810340>]]
if compare[name[length_diff_best] less[<] constant[0]] begin[:]
call[name[target_route_best].insert, parameter[list[[<ast.Name object at 0x7da18f810f70>]], name[j_best]]]
name[solution]._routes assign[=] <ast.ListComp object at 0x7da18f812bf0>
if compare[name[anim] is_not constant[None]] begin[:]
call[name[solution].draw_network, parameter[name[anim]]]
if compare[call[name[round], parameter[name[length_diff_best], name[op_diff_round_digits]]] equal[==] constant[0]] begin[:]
break
return[name[solution]] | keyword[def] identifier[operator_relocate] ( identifier[self] , identifier[graph] , identifier[solution] , identifier[op_diff_round_digits] , identifier[anim] ):
literal[string]
identifier[dm] = identifier[graph] . identifier[_matrix]
identifier[dn] = identifier[graph] . identifier[_nodes]
keyword[while] keyword[True] :
identifier[length_diff_best] = literal[int]
keyword[for] identifier[route] keyword[in] identifier[solution] . identifier[routes] ():
keyword[if] identifier[len] ( identifier[route] . identifier[_nodes] )== literal[int] :
keyword[if] identifier[solution] . identifier[_problem] . identifier[_is_aggregated] [ identifier[str] ( identifier[route] . identifier[_nodes] [ literal[int] ])]:
keyword[continue]
identifier[tour] =[ identifier[graph] . identifier[_depot] ]+ identifier[route] . identifier[_nodes] +[ identifier[graph] . identifier[_depot] ]
keyword[for] identifier[target_route] keyword[in] identifier[solution] . identifier[routes] ():
keyword[if] identifier[len] ( identifier[target_route] . identifier[_nodes] )== literal[int] :
keyword[if] identifier[solution] . identifier[_problem] . identifier[_is_aggregated] [ identifier[str] ( identifier[target_route] . identifier[_nodes] [ literal[int] ])]:
keyword[continue]
identifier[target_tour] =[ identifier[graph] . identifier[_depot] ]+ identifier[target_route] . identifier[_nodes] +[ identifier[graph] . identifier[_depot] ]
keyword[if] identifier[route] == identifier[target_route] :
keyword[continue]
identifier[n] = identifier[len] ( identifier[route] . identifier[_nodes] )
identifier[nt] = identifier[len] ( identifier[target_route] . identifier[_nodes] )+ literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[n] ):
identifier[node] = identifier[route] . identifier[_nodes] [ identifier[i] ]
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[nt] ):
keyword[if] identifier[target_route] . identifier[can_allocate] ([ identifier[node] ]):
identifier[length_diff] =(- identifier[dm] [ identifier[dn] [ identifier[tour] [ identifier[i] ]. identifier[name] ()]][ identifier[dn] [ identifier[tour] [ identifier[i] + literal[int] ]. identifier[name] ()]]-
identifier[dm] [ identifier[dn] [ identifier[tour] [ identifier[i] + literal[int] ]. identifier[name] ()]][ identifier[dn] [ identifier[tour] [ identifier[i] + literal[int] ]. identifier[name] ()]]+
identifier[dm] [ identifier[dn] [ identifier[tour] [ identifier[i] ]. identifier[name] ()]][ identifier[dn] [ identifier[tour] [ identifier[i] + literal[int] ]. identifier[name] ()]]+
identifier[dm] [ identifier[dn] [ identifier[target_tour] [ identifier[j] ]. identifier[name] ()]][ identifier[dn] [ identifier[tour] [ identifier[i] + literal[int] ]. identifier[name] ()]]+
identifier[dm] [ identifier[dn] [ identifier[tour] [ identifier[i] + literal[int] ]. identifier[name] ()]][ identifier[dn] [ identifier[target_tour] [ identifier[j] + literal[int] ]. identifier[name] ()]]-
identifier[dm] [ identifier[dn] [ identifier[target_tour] [ identifier[j] ]. identifier[name] ()]][ identifier[dn] [ identifier[target_tour] [ identifier[j] + literal[int] ]. identifier[name] ()]])
keyword[if] identifier[length_diff] < identifier[length_diff_best] :
identifier[length_diff_best] = identifier[length_diff]
identifier[node_best] , identifier[target_route_best] , identifier[j_best] = identifier[node] , identifier[target_route] , identifier[j]
keyword[if] identifier[length_diff_best] < literal[int] :
identifier[target_route_best] . identifier[insert] ([ identifier[node_best] ], identifier[j_best] )
identifier[solution] . identifier[_routes] =[ identifier[route] keyword[for] identifier[route] keyword[in] identifier[solution] . identifier[_routes] keyword[if] identifier[route] . identifier[_nodes] ]
keyword[if] identifier[anim] keyword[is] keyword[not] keyword[None] :
identifier[solution] . identifier[draw_network] ( identifier[anim] )
keyword[if] identifier[round] ( identifier[length_diff_best] , identifier[op_diff_round_digits] )== literal[int] :
keyword[break]
keyword[return] identifier[solution] | def operator_relocate(self, graph, solution, op_diff_round_digits, anim):
"""applies Relocate inter-route operator to solution
Takes every node from every route and calculates savings when inserted
into all possible positions in other routes. Insertion is done at
position with max. saving and procedure starts over again with newly
created graph as input. Stops when no improvement is found.
Args
----
graph: :networkx:`NetworkX Graph Obj< >`
A NetworkX graaph is used.
solution: BaseSolution
BaseSolution instance
op_diff_round_digits: float
Precision (floating point digits) for rounding route length differences.
*Details*: In some cases when an exchange is performed on two routes with one node each,
the difference between the both solutions (before and after the exchange) is not zero.
This is due to internal rounding errors of float type. So the loop won't break
(alternating between these two solutions), we need an additional criterion to avoid
this behaviour: A threshold to handle values very close to zero as if they were zero
(for a more detailed description of the matter see http://floating-point-gui.de or
https://docs.python.org/3.5/tutorial/floatingpoint.html)
anim: AnimationDing0
AnimationDing0 object
Returns
-------
LocalSearchSolution
A solution (LocalSearchSolution class)
Notes
-----
(Inner) Loop variables:
* i: node that is checked for possible moves (position in the route `tour`, not node name)
* j: node that precedes the insert position in target route (position in the route `target_tour`, not node name)
Todo
----
* Remove ugly nested loops, convert to more efficient matrix operations
"""
# shorter var names for loop
dm = graph._matrix
dn = graph._nodes
# Relocate: Search better solutions by checking possible node moves
while True:
length_diff_best = 0
for route in solution.routes():
# exclude origin routes with single high-demand nodes (Load Areas)
if len(route._nodes) == 1:
if solution._problem._is_aggregated[str(route._nodes[0])]:
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# create tour by adding depot at start and end
tour = [graph._depot] + route._nodes + [graph._depot]
for target_route in solution.routes():
# exclude (origin+target) routes with single high-demand nodes (Load Areas)
if len(target_route._nodes) == 1:
if solution._problem._is_aggregated[str(target_route._nodes[0])]:
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
target_tour = [graph._depot] + target_route._nodes + [graph._depot]
if route == target_route:
continue # depends on [control=['if'], data=[]]
n = len(route._nodes)
nt = len(target_route._nodes) + 1
for i in range(0, n):
node = route._nodes[i]
for j in range(0, nt):
#target_node = target_route._nodes[j]
if target_route.can_allocate([node]):
length_diff = -dm[dn[tour[i].name()]][dn[tour[i + 1].name()]] - dm[dn[tour[i + 1].name()]][dn[tour[i + 2].name()]] + dm[dn[tour[i].name()]][dn[tour[i + 2].name()]] + dm[dn[target_tour[j].name()]][dn[tour[i + 1].name()]] + dm[dn[tour[i + 1].name()]][dn[target_tour[j + 1].name()]] - dm[dn[target_tour[j].name()]][dn[target_tour[j + 1].name()]]
if length_diff < length_diff_best:
length_diff_best = length_diff
(node_best, target_route_best, j_best) = (node, target_route, j) # depends on [control=['if'], data=['length_diff', 'length_diff_best']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['target_route']] # depends on [control=['for'], data=['route']]
if length_diff_best < 0:
# insert new node
target_route_best.insert([node_best], j_best)
# remove empty routes from solution
solution._routes = [route for route in solution._routes if route._nodes]
if anim is not None:
solution.draw_network(anim) # depends on [control=['if'], data=['anim']] # depends on [control=['if'], data=[]]
#print('Bessere Loesung gefunden:', node_best, target_node_best, target_route_best, length_diff_best)
# no improvement found
if round(length_diff_best, op_diff_round_digits) == 0:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
return solution |
def get_events_map():
""" Prepare map of event subscribers.
* Extends copies of BEFORE_EVENTS and AFTER_EVENTS maps with
'set' action.
* Returns map of {before/after: {action: event class(es)}}
"""
from nefertari import events
set_keys = ('create', 'update', 'replace', 'update_many', 'register')
before_events = events.BEFORE_EVENTS.copy()
before_events['set'] = [before_events[key] for key in set_keys]
after_events = events.AFTER_EVENTS.copy()
after_events['set'] = [after_events[key] for key in set_keys]
return {
'before': before_events,
'after': after_events,
} | def function[get_events_map, parameter[]]:
constant[ Prepare map of event subscribers.
* Extends copies of BEFORE_EVENTS and AFTER_EVENTS maps with
'set' action.
* Returns map of {before/after: {action: event class(es)}}
]
from relative_module[nefertari] import module[events]
variable[set_keys] assign[=] tuple[[<ast.Constant object at 0x7da1b1120fa0>, <ast.Constant object at 0x7da1b1122440>, <ast.Constant object at 0x7da1b11233a0>, <ast.Constant object at 0x7da1b11200a0>, <ast.Constant object at 0x7da1b1121690>]]
variable[before_events] assign[=] call[name[events].BEFORE_EVENTS.copy, parameter[]]
call[name[before_events]][constant[set]] assign[=] <ast.ListComp object at 0x7da1b1120ac0>
variable[after_events] assign[=] call[name[events].AFTER_EVENTS.copy, parameter[]]
call[name[after_events]][constant[set]] assign[=] <ast.ListComp object at 0x7da1b1120af0>
return[dictionary[[<ast.Constant object at 0x7da1b1122b90>, <ast.Constant object at 0x7da1b1122ad0>], [<ast.Name object at 0x7da1b1120e20>, <ast.Name object at 0x7da1b1123850>]]] | keyword[def] identifier[get_events_map] ():
literal[string]
keyword[from] identifier[nefertari] keyword[import] identifier[events]
identifier[set_keys] =( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] )
identifier[before_events] = identifier[events] . identifier[BEFORE_EVENTS] . identifier[copy] ()
identifier[before_events] [ literal[string] ]=[ identifier[before_events] [ identifier[key] ] keyword[for] identifier[key] keyword[in] identifier[set_keys] ]
identifier[after_events] = identifier[events] . identifier[AFTER_EVENTS] . identifier[copy] ()
identifier[after_events] [ literal[string] ]=[ identifier[after_events] [ identifier[key] ] keyword[for] identifier[key] keyword[in] identifier[set_keys] ]
keyword[return] {
literal[string] : identifier[before_events] ,
literal[string] : identifier[after_events] ,
} | def get_events_map():
""" Prepare map of event subscribers.
* Extends copies of BEFORE_EVENTS and AFTER_EVENTS maps with
'set' action.
* Returns map of {before/after: {action: event class(es)}}
"""
from nefertari import events
set_keys = ('create', 'update', 'replace', 'update_many', 'register')
before_events = events.BEFORE_EVENTS.copy()
before_events['set'] = [before_events[key] for key in set_keys]
after_events = events.AFTER_EVENTS.copy()
after_events['set'] = [after_events[key] for key in set_keys]
return {'before': before_events, 'after': after_events} |
def body(self, body):
"""
Defines response body data.
Arguments:
body (str|bytes): response body to use.
Returns:
self: ``pook.Response`` current instance.
"""
if isinstance(body, bytes):
body = body.decode('utf-8')
self._body = body | def function[body, parameter[self, body]]:
constant[
Defines response body data.
Arguments:
body (str|bytes): response body to use.
Returns:
self: ``pook.Response`` current instance.
]
if call[name[isinstance], parameter[name[body], name[bytes]]] begin[:]
variable[body] assign[=] call[name[body].decode, parameter[constant[utf-8]]]
name[self]._body assign[=] name[body] | keyword[def] identifier[body] ( identifier[self] , identifier[body] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[body] , identifier[bytes] ):
identifier[body] = identifier[body] . identifier[decode] ( literal[string] )
identifier[self] . identifier[_body] = identifier[body] | def body(self, body):
"""
Defines response body data.
Arguments:
body (str|bytes): response body to use.
Returns:
self: ``pook.Response`` current instance.
"""
if isinstance(body, bytes):
body = body.decode('utf-8') # depends on [control=['if'], data=[]]
self._body = body |
def convert_convolution1d(builder, layer, input_names, output_names, keras_layer):
"""
Convert convolution layer from keras to coreml.
Parameters
----------
keras_layer: layer
A keras layer object.
builder: NeuralNetworkBuilder
A neural network builder object.
"""
# Get input and output names
input_name, output_name = (input_names[0], output_names[0])
has_bias = keras_layer.use_bias
# Get the weights from _keras.
# Keras stores convolution weights as list of numpy arrays
weightList = keras_layer.get_weights()
output_shape = list(filter(None, keras_layer.output_shape))[:-1]
# Parameter
filter_length, input_dim, n_filters = weightList[0].shape
stride_width = keras_layer.strides if type(keras_layer.strides) is int \
else keras_layer.strides[0]
# Weights and bias terms
W = _np.expand_dims(weightList[0],axis=0)
b = weightList[1] if has_bias else None
dilations = [1,1]
if (type(keras_layer.dilation_rate) is list) or \
(type(keras_layer.dilation_rate) is tuple):
dilations = [1, keras_layer.dilation_rate[0]]
else:
dilations = [1, keras_layer.dilation_rate]
keras_padding = keras_layer.padding
if keras_padding == 'causal':
builder.add_padding(name = layer + '__causal_pad__',
left = filter_length-1, right=0, top=0, bottom=0, value= 0,
input_name = input_name,
output_name= input_name + '__causal_pad__')
input_name = input_name + '__causal_pad__'
keras_padding = 'valid'
builder.add_convolution(name = layer,
kernel_channels = input_dim,
output_channels = n_filters,
height = 1,
width = filter_length,
stride_height = 1,
stride_width = stride_width,
border_mode = keras_padding,
groups = 1,
W = W,
b = b,
has_bias = has_bias,
is_deconv = False,
output_shape = output_shape,
input_name = input_name,
output_name = output_name,
dilation_factors = dilations) | def function[convert_convolution1d, parameter[builder, layer, input_names, output_names, keras_layer]]:
constant[
Convert convolution layer from keras to coreml.
Parameters
----------
keras_layer: layer
A keras layer object.
builder: NeuralNetworkBuilder
A neural network builder object.
]
<ast.Tuple object at 0x7da1b1ef3760> assign[=] tuple[[<ast.Subscript object at 0x7da1b1ef3100>, <ast.Subscript object at 0x7da1b1ef1d80>]]
variable[has_bias] assign[=] name[keras_layer].use_bias
variable[weightList] assign[=] call[name[keras_layer].get_weights, parameter[]]
variable[output_shape] assign[=] call[call[name[list], parameter[call[name[filter], parameter[constant[None], name[keras_layer].output_shape]]]]][<ast.Slice object at 0x7da1b203d6f0>]
<ast.Tuple object at 0x7da1b203f010> assign[=] call[name[weightList]][constant[0]].shape
variable[stride_width] assign[=] <ast.IfExp object at 0x7da1b203e9e0>
variable[W] assign[=] call[name[_np].expand_dims, parameter[call[name[weightList]][constant[0]]]]
variable[b] assign[=] <ast.IfExp object at 0x7da1b203cfa0>
variable[dilations] assign[=] list[[<ast.Constant object at 0x7da1b203d7b0>, <ast.Constant object at 0x7da1b203d0c0>]]
if <ast.BoolOp object at 0x7da1b203cb50> begin[:]
variable[dilations] assign[=] list[[<ast.Constant object at 0x7da1b203c7f0>, <ast.Subscript object at 0x7da1b203c700>]]
variable[keras_padding] assign[=] name[keras_layer].padding
if compare[name[keras_padding] equal[==] constant[causal]] begin[:]
call[name[builder].add_padding, parameter[]]
variable[input_name] assign[=] binary_operation[name[input_name] + constant[__causal_pad__]]
variable[keras_padding] assign[=] constant[valid]
call[name[builder].add_convolution, parameter[]] | keyword[def] identifier[convert_convolution1d] ( identifier[builder] , identifier[layer] , identifier[input_names] , identifier[output_names] , identifier[keras_layer] ):
literal[string]
identifier[input_name] , identifier[output_name] =( identifier[input_names] [ literal[int] ], identifier[output_names] [ literal[int] ])
identifier[has_bias] = identifier[keras_layer] . identifier[use_bias]
identifier[weightList] = identifier[keras_layer] . identifier[get_weights] ()
identifier[output_shape] = identifier[list] ( identifier[filter] ( keyword[None] , identifier[keras_layer] . identifier[output_shape] ))[:- literal[int] ]
identifier[filter_length] , identifier[input_dim] , identifier[n_filters] = identifier[weightList] [ literal[int] ]. identifier[shape]
identifier[stride_width] = identifier[keras_layer] . identifier[strides] keyword[if] identifier[type] ( identifier[keras_layer] . identifier[strides] ) keyword[is] identifier[int] keyword[else] identifier[keras_layer] . identifier[strides] [ literal[int] ]
identifier[W] = identifier[_np] . identifier[expand_dims] ( identifier[weightList] [ literal[int] ], identifier[axis] = literal[int] )
identifier[b] = identifier[weightList] [ literal[int] ] keyword[if] identifier[has_bias] keyword[else] keyword[None]
identifier[dilations] =[ literal[int] , literal[int] ]
keyword[if] ( identifier[type] ( identifier[keras_layer] . identifier[dilation_rate] ) keyword[is] identifier[list] ) keyword[or] ( identifier[type] ( identifier[keras_layer] . identifier[dilation_rate] ) keyword[is] identifier[tuple] ):
identifier[dilations] =[ literal[int] , identifier[keras_layer] . identifier[dilation_rate] [ literal[int] ]]
keyword[else] :
identifier[dilations] =[ literal[int] , identifier[keras_layer] . identifier[dilation_rate] ]
identifier[keras_padding] = identifier[keras_layer] . identifier[padding]
keyword[if] identifier[keras_padding] == literal[string] :
identifier[builder] . identifier[add_padding] ( identifier[name] = identifier[layer] + literal[string] ,
identifier[left] = identifier[filter_length] - literal[int] , identifier[right] = literal[int] , identifier[top] = literal[int] , identifier[bottom] = literal[int] , identifier[value] = literal[int] ,
identifier[input_name] = identifier[input_name] ,
identifier[output_name] = identifier[input_name] + literal[string] )
identifier[input_name] = identifier[input_name] + literal[string]
identifier[keras_padding] = literal[string]
identifier[builder] . identifier[add_convolution] ( identifier[name] = identifier[layer] ,
identifier[kernel_channels] = identifier[input_dim] ,
identifier[output_channels] = identifier[n_filters] ,
identifier[height] = literal[int] ,
identifier[width] = identifier[filter_length] ,
identifier[stride_height] = literal[int] ,
identifier[stride_width] = identifier[stride_width] ,
identifier[border_mode] = identifier[keras_padding] ,
identifier[groups] = literal[int] ,
identifier[W] = identifier[W] ,
identifier[b] = identifier[b] ,
identifier[has_bias] = identifier[has_bias] ,
identifier[is_deconv] = keyword[False] ,
identifier[output_shape] = identifier[output_shape] ,
identifier[input_name] = identifier[input_name] ,
identifier[output_name] = identifier[output_name] ,
identifier[dilation_factors] = identifier[dilations] ) | def convert_convolution1d(builder, layer, input_names, output_names, keras_layer):
"""
Convert convolution layer from keras to coreml.
Parameters
----------
keras_layer: layer
A keras layer object.
builder: NeuralNetworkBuilder
A neural network builder object.
"""
# Get input and output names
(input_name, output_name) = (input_names[0], output_names[0])
has_bias = keras_layer.use_bias
# Get the weights from _keras.
# Keras stores convolution weights as list of numpy arrays
weightList = keras_layer.get_weights()
output_shape = list(filter(None, keras_layer.output_shape))[:-1]
# Parameter
(filter_length, input_dim, n_filters) = weightList[0].shape
stride_width = keras_layer.strides if type(keras_layer.strides) is int else keras_layer.strides[0]
# Weights and bias terms
W = _np.expand_dims(weightList[0], axis=0)
b = weightList[1] if has_bias else None
dilations = [1, 1]
if type(keras_layer.dilation_rate) is list or type(keras_layer.dilation_rate) is tuple:
dilations = [1, keras_layer.dilation_rate[0]] # depends on [control=['if'], data=[]]
else:
dilations = [1, keras_layer.dilation_rate]
keras_padding = keras_layer.padding
if keras_padding == 'causal':
builder.add_padding(name=layer + '__causal_pad__', left=filter_length - 1, right=0, top=0, bottom=0, value=0, input_name=input_name, output_name=input_name + '__causal_pad__')
input_name = input_name + '__causal_pad__'
keras_padding = 'valid' # depends on [control=['if'], data=['keras_padding']]
builder.add_convolution(name=layer, kernel_channels=input_dim, output_channels=n_filters, height=1, width=filter_length, stride_height=1, stride_width=stride_width, border_mode=keras_padding, groups=1, W=W, b=b, has_bias=has_bias, is_deconv=False, output_shape=output_shape, input_name=input_name, output_name=output_name, dilation_factors=dilations) |
def find_files_for_use(self, all_files):
"""
Given a list of all the files to consider, only yield Path objects
for those we care about, given our filters
"""
for path in all_files:
# Find the path relative to the parent dir
relpath = self.relpath_for(path)
# Don't care about the ./
if relpath.startswith("./"):
relpath = relpath[2:]
# Only care about paths that aren't filtered
if not self.is_filtered(relpath):
yield Path(path, relpath) | def function[find_files_for_use, parameter[self, all_files]]:
constant[
Given a list of all the files to consider, only yield Path objects
for those we care about, given our filters
]
for taget[name[path]] in starred[name[all_files]] begin[:]
variable[relpath] assign[=] call[name[self].relpath_for, parameter[name[path]]]
if call[name[relpath].startswith, parameter[constant[./]]] begin[:]
variable[relpath] assign[=] call[name[relpath]][<ast.Slice object at 0x7da20c991a20>]
if <ast.UnaryOp object at 0x7da20c993850> begin[:]
<ast.Yield object at 0x7da20c993df0> | keyword[def] identifier[find_files_for_use] ( identifier[self] , identifier[all_files] ):
literal[string]
keyword[for] identifier[path] keyword[in] identifier[all_files] :
identifier[relpath] = identifier[self] . identifier[relpath_for] ( identifier[path] )
keyword[if] identifier[relpath] . identifier[startswith] ( literal[string] ):
identifier[relpath] = identifier[relpath] [ literal[int] :]
keyword[if] keyword[not] identifier[self] . identifier[is_filtered] ( identifier[relpath] ):
keyword[yield] identifier[Path] ( identifier[path] , identifier[relpath] ) | def find_files_for_use(self, all_files):
"""
Given a list of all the files to consider, only yield Path objects
for those we care about, given our filters
"""
for path in all_files:
# Find the path relative to the parent dir
relpath = self.relpath_for(path)
# Don't care about the ./
if relpath.startswith('./'):
relpath = relpath[2:] # depends on [control=['if'], data=[]]
# Only care about paths that aren't filtered
if not self.is_filtered(relpath):
yield Path(path, relpath) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['path']] |
def list_contacts(self, **kwargs):
"""
List all contacts, optionally filtered by a query. Specify filters as
query keyword argument, such as:
email=abc@xyz.com,
mobile=1234567890,
phone=1234567890,
contacts can be filtered by state and company_id such as:
state=[blocked/deleted/unverified/verified]
company_id=1234
contacts updated after a timestamp can be filtered such as;
_updated_since=2018-01-19T02:00:00Z
Passing None means that no named filter will be passed to
Freshdesk, which returns list of all contacts
"""
url = 'contacts?'
page = 1 if not 'page' in kwargs else kwargs['page']
per_page = 100 if not 'per_page' in kwargs else kwargs['per_page']
contacts = []
# Skip pagination by looping over each page and adding tickets if 'page' key is not in kwargs.
# else return the requested page and break the loop
while True:
this_page = self._api._get(url + 'page=%d&per_page=%d'
% (page, per_page), kwargs)
contacts += this_page
if len(this_page) < per_page or 'page' in kwargs:
break
page += 1
return [Contact(**c) for c in contacts] | def function[list_contacts, parameter[self]]:
constant[
List all contacts, optionally filtered by a query. Specify filters as
query keyword argument, such as:
email=abc@xyz.com,
mobile=1234567890,
phone=1234567890,
contacts can be filtered by state and company_id such as:
state=[blocked/deleted/unverified/verified]
company_id=1234
contacts updated after a timestamp can be filtered such as;
_updated_since=2018-01-19T02:00:00Z
Passing None means that no named filter will be passed to
Freshdesk, which returns list of all contacts
]
variable[url] assign[=] constant[contacts?]
variable[page] assign[=] <ast.IfExp object at 0x7da1b11748e0>
variable[per_page] assign[=] <ast.IfExp object at 0x7da1b1174f70>
variable[contacts] assign[=] list[[]]
while constant[True] begin[:]
variable[this_page] assign[=] call[name[self]._api._get, parameter[binary_operation[name[url] + binary_operation[constant[page=%d&per_page=%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1165990>, <ast.Name object at 0x7da1b11657b0>]]]], name[kwargs]]]
<ast.AugAssign object at 0x7da1b11657e0>
if <ast.BoolOp object at 0x7da1b1166260> begin[:]
break
<ast.AugAssign object at 0x7da1b1139a50>
return[<ast.ListComp object at 0x7da1b1138820>] | keyword[def] identifier[list_contacts] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[url] = literal[string]
identifier[page] = literal[int] keyword[if] keyword[not] literal[string] keyword[in] identifier[kwargs] keyword[else] identifier[kwargs] [ literal[string] ]
identifier[per_page] = literal[int] keyword[if] keyword[not] literal[string] keyword[in] identifier[kwargs] keyword[else] identifier[kwargs] [ literal[string] ]
identifier[contacts] =[]
keyword[while] keyword[True] :
identifier[this_page] = identifier[self] . identifier[_api] . identifier[_get] ( identifier[url] + literal[string]
%( identifier[page] , identifier[per_page] ), identifier[kwargs] )
identifier[contacts] += identifier[this_page]
keyword[if] identifier[len] ( identifier[this_page] )< identifier[per_page] keyword[or] literal[string] keyword[in] identifier[kwargs] :
keyword[break]
identifier[page] += literal[int]
keyword[return] [ identifier[Contact] (** identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[contacts] ] | def list_contacts(self, **kwargs):
"""
List all contacts, optionally filtered by a query. Specify filters as
query keyword argument, such as:
email=abc@xyz.com,
mobile=1234567890,
phone=1234567890,
contacts can be filtered by state and company_id such as:
state=[blocked/deleted/unverified/verified]
company_id=1234
contacts updated after a timestamp can be filtered such as;
_updated_since=2018-01-19T02:00:00Z
Passing None means that no named filter will be passed to
Freshdesk, which returns list of all contacts
"""
url = 'contacts?'
page = 1 if not 'page' in kwargs else kwargs['page']
per_page = 100 if not 'per_page' in kwargs else kwargs['per_page']
contacts = []
# Skip pagination by looping over each page and adding tickets if 'page' key is not in kwargs.
# else return the requested page and break the loop
while True:
this_page = self._api._get(url + 'page=%d&per_page=%d' % (page, per_page), kwargs)
contacts += this_page
if len(this_page) < per_page or 'page' in kwargs:
break # depends on [control=['if'], data=[]]
page += 1 # depends on [control=['while'], data=[]]
return [Contact(**c) for c in contacts] |
def multiply(self, a, b):
"""
:type A: List[List[int]]
:type B: List[List[int]]
:rtype: List[List[int]]
"""
if a is None or b is None: return None
m, n = len(a), len(b[0])
if len(b) != n:
raise Exception("A's column number must be equal to B's row number.")
l = len(b[0])
table_a, table_b = {}, {}
for i, row in enumerate(a):
for j, ele in enumerate(row):
if ele:
if i not in table_a: table_a[i] = {}
table_a[i][j] = ele
for i, row in enumerate(b):
for j, ele in enumerate(row):
if ele:
if i not in table_b: table_b[i] = {}
table_b[i][j] = ele
c = [[0 for j in range(l)] for i in range(m)]
for i in table_a:
for k in table_a[i]:
if k not in table_b: continue
for j in table_b[k]:
c[i][j] += table_a[i][k] * table_b[k][j]
return c | def function[multiply, parameter[self, a, b]]:
constant[
:type A: List[List[int]]
:type B: List[List[int]]
:rtype: List[List[int]]
]
if <ast.BoolOp object at 0x7da1b1eeb340> begin[:]
return[constant[None]]
<ast.Tuple object at 0x7da1b1eeb010> assign[=] tuple[[<ast.Call object at 0x7da1b1ee84c0>, <ast.Call object at 0x7da1b1eeb520>]]
if compare[call[name[len], parameter[name[b]]] not_equal[!=] name[n]] begin[:]
<ast.Raise object at 0x7da1b1eea5f0>
variable[l] assign[=] call[name[len], parameter[call[name[b]][constant[0]]]]
<ast.Tuple object at 0x7da1b1ee98d0> assign[=] tuple[[<ast.Dict object at 0x7da1b1ee9660>, <ast.Dict object at 0x7da1b1eea2f0>]]
for taget[tuple[[<ast.Name object at 0x7da1b1ee80d0>, <ast.Name object at 0x7da1b1ee8430>]]] in starred[call[name[enumerate], parameter[name[a]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1ee8310>, <ast.Name object at 0x7da1b1ee8520>]]] in starred[call[name[enumerate], parameter[name[row]]]] begin[:]
if name[ele] begin[:]
if compare[name[i] <ast.NotIn object at 0x7da2590d7190> name[table_a]] begin[:]
call[name[table_a]][name[i]] assign[=] dictionary[[], []]
call[call[name[table_a]][name[i]]][name[j]] assign[=] name[ele]
for taget[tuple[[<ast.Name object at 0x7da1b1ee95d0>, <ast.Name object at 0x7da1b1eeaa70>]]] in starred[call[name[enumerate], parameter[name[b]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1ee9a20>, <ast.Name object at 0x7da1b1eea620>]]] in starred[call[name[enumerate], parameter[name[row]]]] begin[:]
if name[ele] begin[:]
if compare[name[i] <ast.NotIn object at 0x7da2590d7190> name[table_b]] begin[:]
call[name[table_b]][name[i]] assign[=] dictionary[[], []]
call[call[name[table_b]][name[i]]][name[j]] assign[=] name[ele]
variable[c] assign[=] <ast.ListComp object at 0x7da1b20757e0>
for taget[name[i]] in starred[name[table_a]] begin[:]
for taget[name[k]] in starred[call[name[table_a]][name[i]]] begin[:]
if compare[name[k] <ast.NotIn object at 0x7da2590d7190> name[table_b]] begin[:]
continue
for taget[name[j]] in starred[call[name[table_b]][name[k]]] begin[:]
<ast.AugAssign object at 0x7da1b2074040>
return[name[c]] | keyword[def] identifier[multiply] ( identifier[self] , identifier[a] , identifier[b] ):
literal[string]
keyword[if] identifier[a] keyword[is] keyword[None] keyword[or] identifier[b] keyword[is] keyword[None] : keyword[return] keyword[None]
identifier[m] , identifier[n] = identifier[len] ( identifier[a] ), identifier[len] ( identifier[b] [ literal[int] ])
keyword[if] identifier[len] ( identifier[b] )!= identifier[n] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[l] = identifier[len] ( identifier[b] [ literal[int] ])
identifier[table_a] , identifier[table_b] ={},{}
keyword[for] identifier[i] , identifier[row] keyword[in] identifier[enumerate] ( identifier[a] ):
keyword[for] identifier[j] , identifier[ele] keyword[in] identifier[enumerate] ( identifier[row] ):
keyword[if] identifier[ele] :
keyword[if] identifier[i] keyword[not] keyword[in] identifier[table_a] : identifier[table_a] [ identifier[i] ]={}
identifier[table_a] [ identifier[i] ][ identifier[j] ]= identifier[ele]
keyword[for] identifier[i] , identifier[row] keyword[in] identifier[enumerate] ( identifier[b] ):
keyword[for] identifier[j] , identifier[ele] keyword[in] identifier[enumerate] ( identifier[row] ):
keyword[if] identifier[ele] :
keyword[if] identifier[i] keyword[not] keyword[in] identifier[table_b] : identifier[table_b] [ identifier[i] ]={}
identifier[table_b] [ identifier[i] ][ identifier[j] ]= identifier[ele]
identifier[c] =[[ literal[int] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[l] )] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[m] )]
keyword[for] identifier[i] keyword[in] identifier[table_a] :
keyword[for] identifier[k] keyword[in] identifier[table_a] [ identifier[i] ]:
keyword[if] identifier[k] keyword[not] keyword[in] identifier[table_b] : keyword[continue]
keyword[for] identifier[j] keyword[in] identifier[table_b] [ identifier[k] ]:
identifier[c] [ identifier[i] ][ identifier[j] ]+= identifier[table_a] [ identifier[i] ][ identifier[k] ]* identifier[table_b] [ identifier[k] ][ identifier[j] ]
keyword[return] identifier[c] | def multiply(self, a, b):
"""
:type A: List[List[int]]
:type B: List[List[int]]
:rtype: List[List[int]]
"""
if a is None or b is None:
return None # depends on [control=['if'], data=[]]
(m, n) = (len(a), len(b[0]))
if len(b) != n:
raise Exception("A's column number must be equal to B's row number.") # depends on [control=['if'], data=[]]
l = len(b[0])
(table_a, table_b) = ({}, {})
for (i, row) in enumerate(a):
for (j, ele) in enumerate(row):
if ele:
if i not in table_a:
table_a[i] = {} # depends on [control=['if'], data=['i', 'table_a']]
table_a[i][j] = ele # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
for (i, row) in enumerate(b):
for (j, ele) in enumerate(row):
if ele:
if i not in table_b:
table_b[i] = {} # depends on [control=['if'], data=['i', 'table_b']]
table_b[i][j] = ele # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
c = [[0 for j in range(l)] for i in range(m)]
for i in table_a:
for k in table_a[i]:
if k not in table_b:
continue # depends on [control=['if'], data=[]]
for j in table_b[k]:
c[i][j] += table_a[i][k] * table_b[k][j] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['k']] # depends on [control=['for'], data=['i']]
return c |
def _buildElementTree(self,):
"""Turn object into an Element Tree
"""
t_paging = ctree.Element('paging')
t_paging.set('model', self.model)
for key in self.__dict__.keys():
if key != 'model':
t_tag = ctree.SubElement(t_paging, key)
for item in self.__dict__[key].items():
t_tag.set(*item)
self.etree = t_paging
return t_paging | def function[_buildElementTree, parameter[self]]:
constant[Turn object into an Element Tree
]
variable[t_paging] assign[=] call[name[ctree].Element, parameter[constant[paging]]]
call[name[t_paging].set, parameter[constant[model], name[self].model]]
for taget[name[key]] in starred[call[name[self].__dict__.keys, parameter[]]] begin[:]
if compare[name[key] not_equal[!=] constant[model]] begin[:]
variable[t_tag] assign[=] call[name[ctree].SubElement, parameter[name[t_paging], name[key]]]
for taget[name[item]] in starred[call[call[name[self].__dict__][name[key]].items, parameter[]]] begin[:]
call[name[t_tag].set, parameter[<ast.Starred object at 0x7da204566ef0>]]
name[self].etree assign[=] name[t_paging]
return[name[t_paging]] | keyword[def] identifier[_buildElementTree] ( identifier[self] ,):
literal[string]
identifier[t_paging] = identifier[ctree] . identifier[Element] ( literal[string] )
identifier[t_paging] . identifier[set] ( literal[string] , identifier[self] . identifier[model] )
keyword[for] identifier[key] keyword[in] identifier[self] . identifier[__dict__] . identifier[keys] ():
keyword[if] identifier[key] != literal[string] :
identifier[t_tag] = identifier[ctree] . identifier[SubElement] ( identifier[t_paging] , identifier[key] )
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[__dict__] [ identifier[key] ]. identifier[items] ():
identifier[t_tag] . identifier[set] (* identifier[item] )
identifier[self] . identifier[etree] = identifier[t_paging]
keyword[return] identifier[t_paging] | def _buildElementTree(self):
"""Turn object into an Element Tree
"""
t_paging = ctree.Element('paging')
t_paging.set('model', self.model)
for key in self.__dict__.keys():
if key != 'model':
t_tag = ctree.SubElement(t_paging, key)
for item in self.__dict__[key].items():
t_tag.set(*item) # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']]
self.etree = t_paging
return t_paging |
def createConfig(app_version=5.0, wname="", wdesc="", wdescl="", wlic="", wlan="", wurl="", aname="", abio=""):
"""Generates a config file from the information"""
config_file_dir = os.path.join(cwd, "config.py")
config_file = open(config_file_dir, "w")
config_file.write('blended_version = ' + app_version + '\n')
config_file.write('\n')
config_file.write(
'# Configuration is automatically generated by Blended (http://jmroper.com/blended), feel free to edit any values below')
config_file.write('\n')
config_file.write('website_name = "' + wname + '"\n')
config_file.write('website_description = "' + wdesc + '"\n')
config_file.write(
'website_description_long = "' + wdescl + '"\n')
config_file.write('website_license = "' + wlic + '"\n')
config_file.write('website_language = "' + wlan + '"\n')
config_file.write('website_url = "' + wurl + '"\n')
config_file.write('\n')
config_file.write('author_name = "' + aname + '"\n')
config_file.write('author_bio = "' + abio + '"\n')
config_file.write('\n')
config_file.write('home_page_list = True\n')
config_file.write('\n')
config_file.write('plugins = [] # Place all needed plugins in here\n')
config_file.write(
'custom_variables = {} # Place all custom variables in here\n')
config_file.write('\n')
config_file.write('minify_css = False\n')
config_file.write('minify_js = False\n')
config_file.write('\n')
config_file.write('# The following values are used for FTP uploads')
config_file.write('\n')
config_file.write('ftp_server = "localhost"\n')
config_file.write('ftp_username = "user"\n')
config_file.write('ftp_password = "pass"\n')
config_file.write('ftp_port = 21\n')
config_file.write('ftp_upload_path = "public_html/myWebsite"\n')
config_file.close() | def function[createConfig, parameter[app_version, wname, wdesc, wdescl, wlic, wlan, wurl, aname, abio]]:
constant[Generates a config file from the information]
variable[config_file_dir] assign[=] call[name[os].path.join, parameter[name[cwd], constant[config.py]]]
variable[config_file] assign[=] call[name[open], parameter[name[config_file_dir], constant[w]]]
call[name[config_file].write, parameter[binary_operation[binary_operation[constant[blended_version = ] + name[app_version]] + constant[
]]]]
call[name[config_file].write, parameter[constant[
]]]
call[name[config_file].write, parameter[constant[# Configuration is automatically generated by Blended (http://jmroper.com/blended), feel free to edit any values below]]]
call[name[config_file].write, parameter[constant[
]]]
call[name[config_file].write, parameter[binary_operation[binary_operation[constant[website_name = "] + name[wname]] + constant["
]]]]
call[name[config_file].write, parameter[binary_operation[binary_operation[constant[website_description = "] + name[wdesc]] + constant["
]]]]
call[name[config_file].write, parameter[binary_operation[binary_operation[constant[website_description_long = "] + name[wdescl]] + constant["
]]]]
call[name[config_file].write, parameter[binary_operation[binary_operation[constant[website_license = "] + name[wlic]] + constant["
]]]]
call[name[config_file].write, parameter[binary_operation[binary_operation[constant[website_language = "] + name[wlan]] + constant["
]]]]
call[name[config_file].write, parameter[binary_operation[binary_operation[constant[website_url = "] + name[wurl]] + constant["
]]]]
call[name[config_file].write, parameter[constant[
]]]
call[name[config_file].write, parameter[binary_operation[binary_operation[constant[author_name = "] + name[aname]] + constant["
]]]]
call[name[config_file].write, parameter[binary_operation[binary_operation[constant[author_bio = "] + name[abio]] + constant["
]]]]
call[name[config_file].write, parameter[constant[
]]]
call[name[config_file].write, parameter[constant[home_page_list = True
]]]
call[name[config_file].write, parameter[constant[
]]]
call[name[config_file].write, parameter[constant[plugins = [] # Place all needed plugins in here
]]]
call[name[config_file].write, parameter[constant[custom_variables = {} # Place all custom variables in here
]]]
call[name[config_file].write, parameter[constant[
]]]
call[name[config_file].write, parameter[constant[minify_css = False
]]]
call[name[config_file].write, parameter[constant[minify_js = False
]]]
call[name[config_file].write, parameter[constant[
]]]
call[name[config_file].write, parameter[constant[# The following values are used for FTP uploads]]]
call[name[config_file].write, parameter[constant[
]]]
call[name[config_file].write, parameter[constant[ftp_server = "localhost"
]]]
call[name[config_file].write, parameter[constant[ftp_username = "user"
]]]
call[name[config_file].write, parameter[constant[ftp_password = "pass"
]]]
call[name[config_file].write, parameter[constant[ftp_port = 21
]]]
call[name[config_file].write, parameter[constant[ftp_upload_path = "public_html/myWebsite"
]]]
call[name[config_file].close, parameter[]] | keyword[def] identifier[createConfig] ( identifier[app_version] = literal[int] , identifier[wname] = literal[string] , identifier[wdesc] = literal[string] , identifier[wdescl] = literal[string] , identifier[wlic] = literal[string] , identifier[wlan] = literal[string] , identifier[wurl] = literal[string] , identifier[aname] = literal[string] , identifier[abio] = literal[string] ):
literal[string]
identifier[config_file_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[cwd] , literal[string] )
identifier[config_file] = identifier[open] ( identifier[config_file_dir] , literal[string] )
identifier[config_file] . identifier[write] ( literal[string] + identifier[app_version] + literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] (
literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] + identifier[wname] + literal[string] )
identifier[config_file] . identifier[write] ( literal[string] + identifier[wdesc] + literal[string] )
identifier[config_file] . identifier[write] (
literal[string] + identifier[wdescl] + literal[string] )
identifier[config_file] . identifier[write] ( literal[string] + identifier[wlic] + literal[string] )
identifier[config_file] . identifier[write] ( literal[string] + identifier[wlan] + literal[string] )
identifier[config_file] . identifier[write] ( literal[string] + identifier[wurl] + literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] + identifier[aname] + literal[string] )
identifier[config_file] . identifier[write] ( literal[string] + identifier[abio] + literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] (
literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[write] ( literal[string] )
identifier[config_file] . identifier[close] () | def createConfig(app_version=5.0, wname='', wdesc='', wdescl='', wlic='', wlan='', wurl='', aname='', abio=''):
"""Generates a config file from the information"""
config_file_dir = os.path.join(cwd, 'config.py')
config_file = open(config_file_dir, 'w')
config_file.write('blended_version = ' + app_version + '\n')
config_file.write('\n')
config_file.write('# Configuration is automatically generated by Blended (http://jmroper.com/blended), feel free to edit any values below')
config_file.write('\n')
config_file.write('website_name = "' + wname + '"\n')
config_file.write('website_description = "' + wdesc + '"\n')
config_file.write('website_description_long = "' + wdescl + '"\n')
config_file.write('website_license = "' + wlic + '"\n')
config_file.write('website_language = "' + wlan + '"\n')
config_file.write('website_url = "' + wurl + '"\n')
config_file.write('\n')
config_file.write('author_name = "' + aname + '"\n')
config_file.write('author_bio = "' + abio + '"\n')
config_file.write('\n')
config_file.write('home_page_list = True\n')
config_file.write('\n')
config_file.write('plugins = [] # Place all needed plugins in here\n')
config_file.write('custom_variables = {} # Place all custom variables in here\n')
config_file.write('\n')
config_file.write('minify_css = False\n')
config_file.write('minify_js = False\n')
config_file.write('\n')
config_file.write('# The following values are used for FTP uploads')
config_file.write('\n')
config_file.write('ftp_server = "localhost"\n')
config_file.write('ftp_username = "user"\n')
config_file.write('ftp_password = "pass"\n')
config_file.write('ftp_port = 21\n')
config_file.write('ftp_upload_path = "public_html/myWebsite"\n')
config_file.close() |
def create(self, spec_resolver):
"""Creates json documents validator from spec resolver.
:param spec_resolver: reference resolver.
:return: RefResolver for spec with cached remote $refs used during
validation.
:rtype: :class:`jsonschema.RefResolver`
"""
validator_cls = self.spec_validator_factory.from_resolver(
spec_resolver)
return validator_cls(
self.schema, resolver=self.schema_resolver) | def function[create, parameter[self, spec_resolver]]:
constant[Creates json documents validator from spec resolver.
:param spec_resolver: reference resolver.
:return: RefResolver for spec with cached remote $refs used during
validation.
:rtype: :class:`jsonschema.RefResolver`
]
variable[validator_cls] assign[=] call[name[self].spec_validator_factory.from_resolver, parameter[name[spec_resolver]]]
return[call[name[validator_cls], parameter[name[self].schema]]] | keyword[def] identifier[create] ( identifier[self] , identifier[spec_resolver] ):
literal[string]
identifier[validator_cls] = identifier[self] . identifier[spec_validator_factory] . identifier[from_resolver] (
identifier[spec_resolver] )
keyword[return] identifier[validator_cls] (
identifier[self] . identifier[schema] , identifier[resolver] = identifier[self] . identifier[schema_resolver] ) | def create(self, spec_resolver):
"""Creates json documents validator from spec resolver.
:param spec_resolver: reference resolver.
:return: RefResolver for spec with cached remote $refs used during
validation.
:rtype: :class:`jsonschema.RefResolver`
"""
validator_cls = self.spec_validator_factory.from_resolver(spec_resolver)
return validator_cls(self.schema, resolver=self.schema_resolver) |
def _jtime(self, timestamp):
""" Convert datetime or unix_timestamp into Time
"""
if isinstance(timestamp, datetime):
timestamp = time.mktime(timestamp.timetuple())
return self._sc._jvm.Time(long(timestamp * 1000)) | def function[_jtime, parameter[self, timestamp]]:
constant[ Convert datetime or unix_timestamp into Time
]
if call[name[isinstance], parameter[name[timestamp], name[datetime]]] begin[:]
variable[timestamp] assign[=] call[name[time].mktime, parameter[call[name[timestamp].timetuple, parameter[]]]]
return[call[name[self]._sc._jvm.Time, parameter[call[name[long], parameter[binary_operation[name[timestamp] * constant[1000]]]]]]] | keyword[def] identifier[_jtime] ( identifier[self] , identifier[timestamp] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[timestamp] , identifier[datetime] ):
identifier[timestamp] = identifier[time] . identifier[mktime] ( identifier[timestamp] . identifier[timetuple] ())
keyword[return] identifier[self] . identifier[_sc] . identifier[_jvm] . identifier[Time] ( identifier[long] ( identifier[timestamp] * literal[int] )) | def _jtime(self, timestamp):
""" Convert datetime or unix_timestamp into Time
"""
if isinstance(timestamp, datetime):
timestamp = time.mktime(timestamp.timetuple()) # depends on [control=['if'], data=[]]
return self._sc._jvm.Time(long(timestamp * 1000)) |
def make_app(global_conf, full_stack=True, **app_conf):
"""Create a Pylons WSGI application and return it
``global_conf``
The inherited configuration for this application. Normally from
the [DEFAULT] section of the Paste ini file.
``full_stack``
Whether or not this application provides a full WSGI stack (by
default, meaning it handles its own exceptions and errors).
Disable full_stack when this application is "managed" by
another WSGI middleware.
``app_conf``
The application's local configuration. Normally specified in
the [app:<name>] section of the Paste ini file (where <name>
defaults to main).
"""
# Configure the Pylons environment
load_environment(global_conf, app_conf)
# The Pylons WSGI app
app = PylonsApp()
# CUSTOM MIDDLEWARE HERE (filtered by error handling middlewares)
# Authentication middleware
app = AuthMiddleware(app)
# Routing/Session/Cache Middleware
app = RoutesMiddleware(app, config['routes.map'])
app = SessionMiddleware(app, config)
app = CacheMiddleware(app, config)
if asbool(full_stack):
# Handle Python exceptions
app = ErrorHandler(app, global_conf, **config['pylons.errorware'])
# Display error documents for 401, 403, 404 status codes (and
# 500 when debug is disabled)
if asbool(config['debug']):
app = StatusCodeRedirect(app)
else:
app = StatusCodeRedirect(app, [400, 401, 403, 404, 500])
# Establish the Registry for this application
app = RegistryManager(app)
# Static files (If running in production, and Apache or another web
# server is handling this static content, remove the following 2 lines)
static_app = StaticURLParser(config['pylons.paths']['static_files'])
app = Cascade([static_app, app])
return app | def function[make_app, parameter[global_conf, full_stack]]:
constant[Create a Pylons WSGI application and return it
``global_conf``
The inherited configuration for this application. Normally from
the [DEFAULT] section of the Paste ini file.
``full_stack``
Whether or not this application provides a full WSGI stack (by
default, meaning it handles its own exceptions and errors).
Disable full_stack when this application is "managed" by
another WSGI middleware.
``app_conf``
The application's local configuration. Normally specified in
the [app:<name>] section of the Paste ini file (where <name>
defaults to main).
]
call[name[load_environment], parameter[name[global_conf], name[app_conf]]]
variable[app] assign[=] call[name[PylonsApp], parameter[]]
variable[app] assign[=] call[name[AuthMiddleware], parameter[name[app]]]
variable[app] assign[=] call[name[RoutesMiddleware], parameter[name[app], call[name[config]][constant[routes.map]]]]
variable[app] assign[=] call[name[SessionMiddleware], parameter[name[app], name[config]]]
variable[app] assign[=] call[name[CacheMiddleware], parameter[name[app], name[config]]]
if call[name[asbool], parameter[name[full_stack]]] begin[:]
variable[app] assign[=] call[name[ErrorHandler], parameter[name[app], name[global_conf]]]
if call[name[asbool], parameter[call[name[config]][constant[debug]]]] begin[:]
variable[app] assign[=] call[name[StatusCodeRedirect], parameter[name[app]]]
variable[app] assign[=] call[name[RegistryManager], parameter[name[app]]]
variable[static_app] assign[=] call[name[StaticURLParser], parameter[call[call[name[config]][constant[pylons.paths]]][constant[static_files]]]]
variable[app] assign[=] call[name[Cascade], parameter[list[[<ast.Name object at 0x7da1b23e6200>, <ast.Name object at 0x7da1b23e5a50>]]]]
return[name[app]] | keyword[def] identifier[make_app] ( identifier[global_conf] , identifier[full_stack] = keyword[True] ,** identifier[app_conf] ):
literal[string]
identifier[load_environment] ( identifier[global_conf] , identifier[app_conf] )
identifier[app] = identifier[PylonsApp] ()
identifier[app] = identifier[AuthMiddleware] ( identifier[app] )
identifier[app] = identifier[RoutesMiddleware] ( identifier[app] , identifier[config] [ literal[string] ])
identifier[app] = identifier[SessionMiddleware] ( identifier[app] , identifier[config] )
identifier[app] = identifier[CacheMiddleware] ( identifier[app] , identifier[config] )
keyword[if] identifier[asbool] ( identifier[full_stack] ):
identifier[app] = identifier[ErrorHandler] ( identifier[app] , identifier[global_conf] ,** identifier[config] [ literal[string] ])
keyword[if] identifier[asbool] ( identifier[config] [ literal[string] ]):
identifier[app] = identifier[StatusCodeRedirect] ( identifier[app] )
keyword[else] :
identifier[app] = identifier[StatusCodeRedirect] ( identifier[app] ,[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[app] = identifier[RegistryManager] ( identifier[app] )
identifier[static_app] = identifier[StaticURLParser] ( identifier[config] [ literal[string] ][ literal[string] ])
identifier[app] = identifier[Cascade] ([ identifier[static_app] , identifier[app] ])
keyword[return] identifier[app] | def make_app(global_conf, full_stack=True, **app_conf):
"""Create a Pylons WSGI application and return it
``global_conf``
The inherited configuration for this application. Normally from
the [DEFAULT] section of the Paste ini file.
``full_stack``
Whether or not this application provides a full WSGI stack (by
default, meaning it handles its own exceptions and errors).
Disable full_stack when this application is "managed" by
another WSGI middleware.
``app_conf``
The application's local configuration. Normally specified in
the [app:<name>] section of the Paste ini file (where <name>
defaults to main).
"""
# Configure the Pylons environment
load_environment(global_conf, app_conf)
# The Pylons WSGI app
app = PylonsApp()
# CUSTOM MIDDLEWARE HERE (filtered by error handling middlewares)
# Authentication middleware
app = AuthMiddleware(app)
# Routing/Session/Cache Middleware
app = RoutesMiddleware(app, config['routes.map'])
app = SessionMiddleware(app, config)
app = CacheMiddleware(app, config)
if asbool(full_stack):
# Handle Python exceptions
app = ErrorHandler(app, global_conf, **config['pylons.errorware'])
# Display error documents for 401, 403, 404 status codes (and
# 500 when debug is disabled)
if asbool(config['debug']):
app = StatusCodeRedirect(app) # depends on [control=['if'], data=[]]
else:
app = StatusCodeRedirect(app, [400, 401, 403, 404, 500]) # depends on [control=['if'], data=[]]
# Establish the Registry for this application
app = RegistryManager(app) # Static files (If running in production, and Apache or another web
# server is handling this static content, remove the following 2 lines)
static_app = StaticURLParser(config['pylons.paths']['static_files'])
app = Cascade([static_app, app])
return app |
def backup_config(self, filename):
"""Backup the current config file.
"""
backup_name = filename + '-' + self.version
warn('Moving current configuration to ' + backup_name)
try:
shutil.copy2(filename, backup_name)
return True
except (IOError, OSError) as e:
print('Error copying %s: %s' % (filename, e.strerror or e), file=sys.stderr)
return False | def function[backup_config, parameter[self, filename]]:
constant[Backup the current config file.
]
variable[backup_name] assign[=] binary_operation[binary_operation[name[filename] + constant[-]] + name[self].version]
call[name[warn], parameter[binary_operation[constant[Moving current configuration to ] + name[backup_name]]]]
<ast.Try object at 0x7da18dc98190> | keyword[def] identifier[backup_config] ( identifier[self] , identifier[filename] ):
literal[string]
identifier[backup_name] = identifier[filename] + literal[string] + identifier[self] . identifier[version]
identifier[warn] ( literal[string] + identifier[backup_name] )
keyword[try] :
identifier[shutil] . identifier[copy2] ( identifier[filename] , identifier[backup_name] )
keyword[return] keyword[True]
keyword[except] ( identifier[IOError] , identifier[OSError] ) keyword[as] identifier[e] :
identifier[print] ( literal[string] %( identifier[filename] , identifier[e] . identifier[strerror] keyword[or] identifier[e] ), identifier[file] = identifier[sys] . identifier[stderr] )
keyword[return] keyword[False] | def backup_config(self, filename):
"""Backup the current config file.
"""
backup_name = filename + '-' + self.version
warn('Moving current configuration to ' + backup_name)
try:
shutil.copy2(filename, backup_name)
return True # depends on [control=['try'], data=[]]
except (IOError, OSError) as e:
print('Error copying %s: %s' % (filename, e.strerror or e), file=sys.stderr)
return False # depends on [control=['except'], data=['e']] |
def _mb_normal_model(self, beta, mini_batch):
""" Creates the structure of the model (model matrices, etc) for
a mini-batch Normal family ARIMAX model.
Here the structure is the same as for _normal_model() but we are going to
sample a random choice of data points (of length mini_batch).
Parameters
----------
beta : np.ndarray
Contains untransformed starting values for the latent variables
mini_batch : int
Mini batch size for the data sampling
Returns
----------
mu : np.ndarray
Contains the predicted values (location) for the time series
Y : np.ndarray
Contains the length-adjusted time series (accounting for lags)
"""
rand_int = np.random.randint(low=0, high=self.data_length-mini_batch-self.max_lag+1)
sample = np.arange(start=rand_int, stop=rand_int+mini_batch)
data = self.y[sample]
X = self.X[sample, :]
Y = data[self.max_lag:]
if self.ar != 0:
ar_matrix = data[(self.max_lag-1):-1]
for i in range(1, self.ar):
ar_matrix = np.vstack((ar_matrix, data[(self.max_lag-i-1):-i-1]))
else:
ar_matrix = np.zeros(data.shape[0]-self.max_lag)
# Transform latent variables
z = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])])
# Constant and AR terms
if self.ar == 0:
mu = np.transpose(ar_matrix)
elif self.ar == 1:
mu = np.transpose(ar_matrix)*z[:-self.family_z_no-self.ma-len(self.X_names)][0]
else:
mu = np.matmul(np.transpose(ar_matrix),z[:-self.family_z_no-self.ma-len(self.X_names)])
# X terms
mu = mu + np.matmul(X[self.integ+self.max_lag:],z[self.ma+self.ar:(self.ma+self.ar+len(self.X_names))])
# MA terms
if self.ma != 0:
mu = arimax_recursion(z, mu, Y, self.max_lag, Y.shape[0], self.ar, self.ma)
return mu, Y | def function[_mb_normal_model, parameter[self, beta, mini_batch]]:
constant[ Creates the structure of the model (model matrices, etc) for
a mini-batch Normal family ARIMAX model.
Here the structure is the same as for _normal_model() but we are going to
sample a random choice of data points (of length mini_batch).
Parameters
----------
beta : np.ndarray
Contains untransformed starting values for the latent variables
mini_batch : int
Mini batch size for the data sampling
Returns
----------
mu : np.ndarray
Contains the predicted values (location) for the time series
Y : np.ndarray
Contains the length-adjusted time series (accounting for lags)
]
variable[rand_int] assign[=] call[name[np].random.randint, parameter[]]
variable[sample] assign[=] call[name[np].arange, parameter[]]
variable[data] assign[=] call[name[self].y][name[sample]]
variable[X] assign[=] call[name[self].X][tuple[[<ast.Name object at 0x7da18fe91810>, <ast.Slice object at 0x7da18fe918d0>]]]
variable[Y] assign[=] call[name[data]][<ast.Slice object at 0x7da18fe90a90>]
if compare[name[self].ar not_equal[!=] constant[0]] begin[:]
variable[ar_matrix] assign[=] call[name[data]][<ast.Slice object at 0x7da18fe91330>]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], name[self].ar]]] begin[:]
variable[ar_matrix] assign[=] call[name[np].vstack, parameter[tuple[[<ast.Name object at 0x7da18fe91720>, <ast.Subscript object at 0x7da18fe93f70>]]]]
variable[z] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da18bc71480>]]
if compare[name[self].ar equal[==] constant[0]] begin[:]
variable[mu] assign[=] call[name[np].transpose, parameter[name[ar_matrix]]]
variable[mu] assign[=] binary_operation[name[mu] + call[name[np].matmul, parameter[call[name[X]][<ast.Slice object at 0x7da20c6c64d0>], call[name[z]][<ast.Slice object at 0x7da20c6c53f0>]]]]
if compare[name[self].ma not_equal[!=] constant[0]] begin[:]
variable[mu] assign[=] call[name[arimax_recursion], parameter[name[z], name[mu], name[Y], name[self].max_lag, call[name[Y].shape][constant[0]], name[self].ar, name[self].ma]]
return[tuple[[<ast.Name object at 0x7da20c6c5a50>, <ast.Name object at 0x7da20c6c61d0>]]] | keyword[def] identifier[_mb_normal_model] ( identifier[self] , identifier[beta] , identifier[mini_batch] ):
literal[string]
identifier[rand_int] = identifier[np] . identifier[random] . identifier[randint] ( identifier[low] = literal[int] , identifier[high] = identifier[self] . identifier[data_length] - identifier[mini_batch] - identifier[self] . identifier[max_lag] + literal[int] )
identifier[sample] = identifier[np] . identifier[arange] ( identifier[start] = identifier[rand_int] , identifier[stop] = identifier[rand_int] + identifier[mini_batch] )
identifier[data] = identifier[self] . identifier[y] [ identifier[sample] ]
identifier[X] = identifier[self] . identifier[X] [ identifier[sample] ,:]
identifier[Y] = identifier[data] [ identifier[self] . identifier[max_lag] :]
keyword[if] identifier[self] . identifier[ar] != literal[int] :
identifier[ar_matrix] = identifier[data] [( identifier[self] . identifier[max_lag] - literal[int] ):- literal[int] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[ar] ):
identifier[ar_matrix] = identifier[np] . identifier[vstack] (( identifier[ar_matrix] , identifier[data] [( identifier[self] . identifier[max_lag] - identifier[i] - literal[int] ):- identifier[i] - literal[int] ]))
keyword[else] :
identifier[ar_matrix] = identifier[np] . identifier[zeros] ( identifier[data] . identifier[shape] [ literal[int] ]- identifier[self] . identifier[max_lag] )
identifier[z] = identifier[np] . identifier[array] ([ identifier[self] . identifier[latent_variables] . identifier[z_list] [ identifier[k] ]. identifier[prior] . identifier[transform] ( identifier[beta] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[beta] . identifier[shape] [ literal[int] ])])
keyword[if] identifier[self] . identifier[ar] == literal[int] :
identifier[mu] = identifier[np] . identifier[transpose] ( identifier[ar_matrix] )
keyword[elif] identifier[self] . identifier[ar] == literal[int] :
identifier[mu] = identifier[np] . identifier[transpose] ( identifier[ar_matrix] )* identifier[z] [:- identifier[self] . identifier[family_z_no] - identifier[self] . identifier[ma] - identifier[len] ( identifier[self] . identifier[X_names] )][ literal[int] ]
keyword[else] :
identifier[mu] = identifier[np] . identifier[matmul] ( identifier[np] . identifier[transpose] ( identifier[ar_matrix] ), identifier[z] [:- identifier[self] . identifier[family_z_no] - identifier[self] . identifier[ma] - identifier[len] ( identifier[self] . identifier[X_names] )])
identifier[mu] = identifier[mu] + identifier[np] . identifier[matmul] ( identifier[X] [ identifier[self] . identifier[integ] + identifier[self] . identifier[max_lag] :], identifier[z] [ identifier[self] . identifier[ma] + identifier[self] . identifier[ar] :( identifier[self] . identifier[ma] + identifier[self] . identifier[ar] + identifier[len] ( identifier[self] . identifier[X_names] ))])
keyword[if] identifier[self] . identifier[ma] != literal[int] :
identifier[mu] = identifier[arimax_recursion] ( identifier[z] , identifier[mu] , identifier[Y] , identifier[self] . identifier[max_lag] , identifier[Y] . identifier[shape] [ literal[int] ], identifier[self] . identifier[ar] , identifier[self] . identifier[ma] )
keyword[return] identifier[mu] , identifier[Y] | def _mb_normal_model(self, beta, mini_batch):
""" Creates the structure of the model (model matrices, etc) for
a mini-batch Normal family ARIMAX model.
Here the structure is the same as for _normal_model() but we are going to
sample a random choice of data points (of length mini_batch).
Parameters
----------
beta : np.ndarray
Contains untransformed starting values for the latent variables
mini_batch : int
Mini batch size for the data sampling
Returns
----------
mu : np.ndarray
Contains the predicted values (location) for the time series
Y : np.ndarray
Contains the length-adjusted time series (accounting for lags)
"""
rand_int = np.random.randint(low=0, high=self.data_length - mini_batch - self.max_lag + 1)
sample = np.arange(start=rand_int, stop=rand_int + mini_batch)
data = self.y[sample]
X = self.X[sample, :]
Y = data[self.max_lag:]
if self.ar != 0:
ar_matrix = data[self.max_lag - 1:-1]
for i in range(1, self.ar):
ar_matrix = np.vstack((ar_matrix, data[self.max_lag - i - 1:-i - 1])) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
else:
ar_matrix = np.zeros(data.shape[0] - self.max_lag)
# Transform latent variables
z = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])])
# Constant and AR terms
if self.ar == 0:
mu = np.transpose(ar_matrix) # depends on [control=['if'], data=[]]
elif self.ar == 1:
mu = np.transpose(ar_matrix) * z[:-self.family_z_no - self.ma - len(self.X_names)][0] # depends on [control=['if'], data=[]]
else:
mu = np.matmul(np.transpose(ar_matrix), z[:-self.family_z_no - self.ma - len(self.X_names)])
# X terms
mu = mu + np.matmul(X[self.integ + self.max_lag:], z[self.ma + self.ar:self.ma + self.ar + len(self.X_names)])
# MA terms
if self.ma != 0:
mu = arimax_recursion(z, mu, Y, self.max_lag, Y.shape[0], self.ar, self.ma) # depends on [control=['if'], data=[]]
return (mu, Y) |
def separate_fields(fields):
"""
Non-foreign key fields can be mapped to new article instances
directly. Foreign key fields require a bit more work.
This method returns a tuple, of the same format:
(flat fields, nested fields)
"""
flat_fields = {}
nested_fields = {}
# exclude "id" and "meta" elements
for k, v in fields.items():
# TODO: remove dependence on KEYS_TO_INCLUDE
if k not in KEYS_TO_EXCLUDE:
if type(v) not in [type({}), type([])]:
flat_fields.update({k: v})
else:
nested_fields.update({k: v})
return flat_fields, nested_fields | def function[separate_fields, parameter[fields]]:
constant[
Non-foreign key fields can be mapped to new article instances
directly. Foreign key fields require a bit more work.
This method returns a tuple, of the same format:
(flat fields, nested fields)
]
variable[flat_fields] assign[=] dictionary[[], []]
variable[nested_fields] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da18ede5c30>, <ast.Name object at 0x7da18ede73a0>]]] in starred[call[name[fields].items, parameter[]]] begin[:]
if compare[name[k] <ast.NotIn object at 0x7da2590d7190> name[KEYS_TO_EXCLUDE]] begin[:]
if compare[call[name[type], parameter[name[v]]] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Call object at 0x7da18ede4e50>, <ast.Call object at 0x7da18ede7700>]]] begin[:]
call[name[flat_fields].update, parameter[dictionary[[<ast.Name object at 0x7da18ede42e0>], [<ast.Name object at 0x7da18ede7850>]]]]
return[tuple[[<ast.Name object at 0x7da18ede5f60>, <ast.Name object at 0x7da18ede5180>]]] | keyword[def] identifier[separate_fields] ( identifier[fields] ):
literal[string]
identifier[flat_fields] ={}
identifier[nested_fields] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[fields] . identifier[items] ():
keyword[if] identifier[k] keyword[not] keyword[in] identifier[KEYS_TO_EXCLUDE] :
keyword[if] identifier[type] ( identifier[v] ) keyword[not] keyword[in] [ identifier[type] ({}), identifier[type] ([])]:
identifier[flat_fields] . identifier[update] ({ identifier[k] : identifier[v] })
keyword[else] :
identifier[nested_fields] . identifier[update] ({ identifier[k] : identifier[v] })
keyword[return] identifier[flat_fields] , identifier[nested_fields] | def separate_fields(fields):
"""
Non-foreign key fields can be mapped to new article instances
directly. Foreign key fields require a bit more work.
This method returns a tuple, of the same format:
(flat fields, nested fields)
"""
flat_fields = {}
nested_fields = {}
# exclude "id" and "meta" elements
for (k, v) in fields.items():
# TODO: remove dependence on KEYS_TO_INCLUDE
if k not in KEYS_TO_EXCLUDE:
if type(v) not in [type({}), type([])]:
flat_fields.update({k: v}) # depends on [control=['if'], data=[]]
else:
nested_fields.update({k: v}) # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=[]]
return (flat_fields, nested_fields) |
def server_pxe():
'''
Configure server to PXE perform a one off PXE boot
CLI Example:
.. code-block:: bash
salt dell drac.server_pxe
'''
if __execute_cmd('config -g cfgServerInfo -o \
cfgServerFirstBootDevice PXE'):
if __execute_cmd('config -g cfgServerInfo -o cfgServerBootOnce 1'):
return server_reboot
else:
log.warning('failed to set boot order')
return False
log.warning('failed to configure PXE boot')
return False | def function[server_pxe, parameter[]]:
constant[
Configure server to PXE perform a one off PXE boot
CLI Example:
.. code-block:: bash
salt dell drac.server_pxe
]
if call[name[__execute_cmd], parameter[constant[config -g cfgServerInfo -o cfgServerFirstBootDevice PXE]]] begin[:]
if call[name[__execute_cmd], parameter[constant[config -g cfgServerInfo -o cfgServerBootOnce 1]]] begin[:]
return[name[server_reboot]]
call[name[log].warning, parameter[constant[failed to configure PXE boot]]]
return[constant[False]] | keyword[def] identifier[server_pxe] ():
literal[string]
keyword[if] identifier[__execute_cmd] ( literal[string] ):
keyword[if] identifier[__execute_cmd] ( literal[string] ):
keyword[return] identifier[server_reboot]
keyword[else] :
identifier[log] . identifier[warning] ( literal[string] )
keyword[return] keyword[False]
identifier[log] . identifier[warning] ( literal[string] )
keyword[return] keyword[False] | def server_pxe():
"""
Configure server to PXE perform a one off PXE boot
CLI Example:
.. code-block:: bash
salt dell drac.server_pxe
"""
if __execute_cmd('config -g cfgServerInfo -o cfgServerFirstBootDevice PXE'):
if __execute_cmd('config -g cfgServerInfo -o cfgServerBootOnce 1'):
return server_reboot # depends on [control=['if'], data=[]]
else:
log.warning('failed to set boot order')
return False # depends on [control=['if'], data=[]]
log.warning('failed to configure PXE boot')
return False |
def _import_module(name, package='vlfd', warn=True, prefix='_py_', ignore='_'):
"""Try import all public attributes from module into global namespace.
Existing attributes with name clashes are renamed with prefix.
Attributes starting with underscore are ignored by default.
Return True on successful import.
"""
import warnings
from importlib import import_module
try:
try:
module = import_module(name)
except ImportError:
module = import_module('.' + name, package=package)
except ImportError:
if warn:
warnings.warn("failed to import module %s" % name)
else:
for attr in dir(module):
if ignore and attr.startswith(ignore):
continue
if prefix:
if attr in globals():
globals()[prefix + attr] = globals()[attr]
elif warn:
warnings.warn("no Python implementation of " + attr)
globals()[attr] = getattr(module, attr)
return True | def function[_import_module, parameter[name, package, warn, prefix, ignore]]:
constant[Try import all public attributes from module into global namespace.
Existing attributes with name clashes are renamed with prefix.
Attributes starting with underscore are ignored by default.
Return True on successful import.
]
import module[warnings]
from relative_module[importlib] import module[import_module]
<ast.Try object at 0x7da20e954c40> | keyword[def] identifier[_import_module] ( identifier[name] , identifier[package] = literal[string] , identifier[warn] = keyword[True] , identifier[prefix] = literal[string] , identifier[ignore] = literal[string] ):
literal[string]
keyword[import] identifier[warnings]
keyword[from] identifier[importlib] keyword[import] identifier[import_module]
keyword[try] :
keyword[try] :
identifier[module] = identifier[import_module] ( identifier[name] )
keyword[except] identifier[ImportError] :
identifier[module] = identifier[import_module] ( literal[string] + identifier[name] , identifier[package] = identifier[package] )
keyword[except] identifier[ImportError] :
keyword[if] identifier[warn] :
identifier[warnings] . identifier[warn] ( literal[string] % identifier[name] )
keyword[else] :
keyword[for] identifier[attr] keyword[in] identifier[dir] ( identifier[module] ):
keyword[if] identifier[ignore] keyword[and] identifier[attr] . identifier[startswith] ( identifier[ignore] ):
keyword[continue]
keyword[if] identifier[prefix] :
keyword[if] identifier[attr] keyword[in] identifier[globals] ():
identifier[globals] ()[ identifier[prefix] + identifier[attr] ]= identifier[globals] ()[ identifier[attr] ]
keyword[elif] identifier[warn] :
identifier[warnings] . identifier[warn] ( literal[string] + identifier[attr] )
identifier[globals] ()[ identifier[attr] ]= identifier[getattr] ( identifier[module] , identifier[attr] )
keyword[return] keyword[True] | def _import_module(name, package='vlfd', warn=True, prefix='_py_', ignore='_'):
"""Try import all public attributes from module into global namespace.
Existing attributes with name clashes are renamed with prefix.
Attributes starting with underscore are ignored by default.
Return True on successful import.
"""
import warnings
from importlib import import_module
try:
try:
module = import_module(name) # depends on [control=['try'], data=[]]
except ImportError:
module = import_module('.' + name, package=package) # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]]
except ImportError:
if warn:
warnings.warn('failed to import module %s' % name) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
else:
for attr in dir(module):
if ignore and attr.startswith(ignore):
continue # depends on [control=['if'], data=[]]
if prefix:
if attr in globals():
globals()[prefix + attr] = globals()[attr] # depends on [control=['if'], data=['attr']]
elif warn:
warnings.warn('no Python implementation of ' + attr) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
globals()[attr] = getattr(module, attr) # depends on [control=['for'], data=['attr']]
return True |
def open(self):
"""Implementation of NAPALM method open."""
try:
connection = self.transport_class(
host=self.hostname,
username=self.username,
password=self.password,
timeout=self.timeout,
**self.eapi_kwargs
)
if self.device is None:
self.device = pyeapi.client.Node(connection, enablepwd=self.enablepwd)
# does not raise an Exception if unusable
# let's try to run a very simple command
self.device.run_commands(["show clock"], encoding="text")
except ConnectionError as ce:
# and this is raised either if device not avaiable
# either if HTTP(S) agent is not enabled
# show management api http-commands
raise ConnectionException(py23_compat.text_type(ce)) | def function[open, parameter[self]]:
constant[Implementation of NAPALM method open.]
<ast.Try object at 0x7da1b1b989a0> | keyword[def] identifier[open] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[connection] = identifier[self] . identifier[transport_class] (
identifier[host] = identifier[self] . identifier[hostname] ,
identifier[username] = identifier[self] . identifier[username] ,
identifier[password] = identifier[self] . identifier[password] ,
identifier[timeout] = identifier[self] . identifier[timeout] ,
** identifier[self] . identifier[eapi_kwargs]
)
keyword[if] identifier[self] . identifier[device] keyword[is] keyword[None] :
identifier[self] . identifier[device] = identifier[pyeapi] . identifier[client] . identifier[Node] ( identifier[connection] , identifier[enablepwd] = identifier[self] . identifier[enablepwd] )
identifier[self] . identifier[device] . identifier[run_commands] ([ literal[string] ], identifier[encoding] = literal[string] )
keyword[except] identifier[ConnectionError] keyword[as] identifier[ce] :
keyword[raise] identifier[ConnectionException] ( identifier[py23_compat] . identifier[text_type] ( identifier[ce] )) | def open(self):
"""Implementation of NAPALM method open."""
try:
connection = self.transport_class(host=self.hostname, username=self.username, password=self.password, timeout=self.timeout, **self.eapi_kwargs)
if self.device is None:
self.device = pyeapi.client.Node(connection, enablepwd=self.enablepwd) # depends on [control=['if'], data=[]]
# does not raise an Exception if unusable
# let's try to run a very simple command
self.device.run_commands(['show clock'], encoding='text') # depends on [control=['try'], data=[]]
except ConnectionError as ce:
# and this is raised either if device not avaiable
# either if HTTP(S) agent is not enabled
# show management api http-commands
raise ConnectionException(py23_compat.text_type(ce)) # depends on [control=['except'], data=['ce']] |
def e123(number, areasize=3, groupsize=4, national=False):
'''
Printable E.123 (Notation for national and international telephone numbers
from ITU) numbers.
:param number: string
:param areasize: int
:param groupsize: int
:param national: bool
>>> print(e123(155542315678))
+1 555 4231 5678
>>> print(e123('+31654231567', areasize=1))
+31 6 5423 1567
>>> print(e123('+3114020', areasize=2))
+31 14 020
>>> print(e123('+312054231567', areasize=2, national=True))
(020) 5423 1567
'''
if isinstance(number, six.integer_types):
return e123('+%s' % number, areasize, groupsize)
elif isinstance(number, six.string_types):
number = strip(number, '-. ()')
if number.startswith('+'):
number = number[1:]
if not number.isdigit():
raise ValueError(_('Invalid telephone number'))
groups = []
prefix = ''
remain = number
if national:
for x in six.moves.xrange(3, 0, -1):
if number[:x] in PHONE_PREFIX:
groups.append('(0%s)' % number[x:x + areasize])
remain = number[x + areasize:]
break
else:
prefix = '+'
for x in six.moves.xrange(3, 0, -1):
if number[:x] in PHONE_PREFIX:
groups.append(number[:x])
groups.append(number[x:x + areasize])
remain = number[x + areasize:]
break
for x in six.moves.xrange(0, len(remain) + 1, groupsize):
groups.append(remain[x:x + groupsize])
return '%s%s' % (prefix, ' '.join(list(filter(None, groups)))) | def function[e123, parameter[number, areasize, groupsize, national]]:
constant[
Printable E.123 (Notation for national and international telephone numbers
from ITU) numbers.
:param number: string
:param areasize: int
:param groupsize: int
:param national: bool
>>> print(e123(155542315678))
+1 555 4231 5678
>>> print(e123('+31654231567', areasize=1))
+31 6 5423 1567
>>> print(e123('+3114020', areasize=2))
+31 14 020
>>> print(e123('+312054231567', areasize=2, national=True))
(020) 5423 1567
]
if call[name[isinstance], parameter[name[number], name[six].integer_types]] begin[:]
return[call[name[e123], parameter[binary_operation[constant[+%s] <ast.Mod object at 0x7da2590d6920> name[number]], name[areasize], name[groupsize]]]] | keyword[def] identifier[e123] ( identifier[number] , identifier[areasize] = literal[int] , identifier[groupsize] = literal[int] , identifier[national] = keyword[False] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[number] , identifier[six] . identifier[integer_types] ):
keyword[return] identifier[e123] ( literal[string] % identifier[number] , identifier[areasize] , identifier[groupsize] )
keyword[elif] identifier[isinstance] ( identifier[number] , identifier[six] . identifier[string_types] ):
identifier[number] = identifier[strip] ( identifier[number] , literal[string] )
keyword[if] identifier[number] . identifier[startswith] ( literal[string] ):
identifier[number] = identifier[number] [ literal[int] :]
keyword[if] keyword[not] identifier[number] . identifier[isdigit] ():
keyword[raise] identifier[ValueError] ( identifier[_] ( literal[string] ))
identifier[groups] =[]
identifier[prefix] = literal[string]
identifier[remain] = identifier[number]
keyword[if] identifier[national] :
keyword[for] identifier[x] keyword[in] identifier[six] . identifier[moves] . identifier[xrange] ( literal[int] , literal[int] ,- literal[int] ):
keyword[if] identifier[number] [: identifier[x] ] keyword[in] identifier[PHONE_PREFIX] :
identifier[groups] . identifier[append] ( literal[string] % identifier[number] [ identifier[x] : identifier[x] + identifier[areasize] ])
identifier[remain] = identifier[number] [ identifier[x] + identifier[areasize] :]
keyword[break]
keyword[else] :
identifier[prefix] = literal[string]
keyword[for] identifier[x] keyword[in] identifier[six] . identifier[moves] . identifier[xrange] ( literal[int] , literal[int] ,- literal[int] ):
keyword[if] identifier[number] [: identifier[x] ] keyword[in] identifier[PHONE_PREFIX] :
identifier[groups] . identifier[append] ( identifier[number] [: identifier[x] ])
identifier[groups] . identifier[append] ( identifier[number] [ identifier[x] : identifier[x] + identifier[areasize] ])
identifier[remain] = identifier[number] [ identifier[x] + identifier[areasize] :]
keyword[break]
keyword[for] identifier[x] keyword[in] identifier[six] . identifier[moves] . identifier[xrange] ( literal[int] , identifier[len] ( identifier[remain] )+ literal[int] , identifier[groupsize] ):
identifier[groups] . identifier[append] ( identifier[remain] [ identifier[x] : identifier[x] + identifier[groupsize] ])
keyword[return] literal[string] %( identifier[prefix] , literal[string] . identifier[join] ( identifier[list] ( identifier[filter] ( keyword[None] , identifier[groups] )))) | def e123(number, areasize=3, groupsize=4, national=False):
"""
Printable E.123 (Notation for national and international telephone numbers
from ITU) numbers.
:param number: string
:param areasize: int
:param groupsize: int
:param national: bool
>>> print(e123(155542315678))
+1 555 4231 5678
>>> print(e123('+31654231567', areasize=1))
+31 6 5423 1567
>>> print(e123('+3114020', areasize=2))
+31 14 020
>>> print(e123('+312054231567', areasize=2, national=True))
(020) 5423 1567
"""
if isinstance(number, six.integer_types):
return e123('+%s' % number, areasize, groupsize) # depends on [control=['if'], data=[]]
elif isinstance(number, six.string_types):
number = strip(number, '-. ()')
if number.startswith('+'):
number = number[1:] # depends on [control=['if'], data=[]]
if not number.isdigit():
raise ValueError(_('Invalid telephone number')) # depends on [control=['if'], data=[]]
groups = []
prefix = ''
remain = number
if national:
for x in six.moves.xrange(3, 0, -1):
if number[:x] in PHONE_PREFIX:
groups.append('(0%s)' % number[x:x + areasize])
remain = number[x + areasize:]
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] # depends on [control=['if'], data=[]]
else:
prefix = '+'
for x in six.moves.xrange(3, 0, -1):
if number[:x] in PHONE_PREFIX:
groups.append(number[:x])
groups.append(number[x:x + areasize])
remain = number[x + areasize:]
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']]
for x in six.moves.xrange(0, len(remain) + 1, groupsize):
groups.append(remain[x:x + groupsize]) # depends on [control=['for'], data=['x']]
return '%s%s' % (prefix, ' '.join(list(filter(None, groups)))) # depends on [control=['if'], data=[]] |
def get_conn(self, headers=None):
"""
Overwrite HttpHook get_conn because this hook just needs base_url
and headers, and does not need generic params
:param headers: additional headers to be passed through as a dictionary
:type headers: dict
"""
conn = self.get_connection(self.http_conn_id)
self.base_url = conn.host if conn.host else 'https://api.opsgenie.com'
session = requests.Session()
if headers:
session.headers.update(headers)
return session | def function[get_conn, parameter[self, headers]]:
constant[
Overwrite HttpHook get_conn because this hook just needs base_url
and headers, and does not need generic params
:param headers: additional headers to be passed through as a dictionary
:type headers: dict
]
variable[conn] assign[=] call[name[self].get_connection, parameter[name[self].http_conn_id]]
name[self].base_url assign[=] <ast.IfExp object at 0x7da1b0595480>
variable[session] assign[=] call[name[requests].Session, parameter[]]
if name[headers] begin[:]
call[name[session].headers.update, parameter[name[headers]]]
return[name[session]] | keyword[def] identifier[get_conn] ( identifier[self] , identifier[headers] = keyword[None] ):
literal[string]
identifier[conn] = identifier[self] . identifier[get_connection] ( identifier[self] . identifier[http_conn_id] )
identifier[self] . identifier[base_url] = identifier[conn] . identifier[host] keyword[if] identifier[conn] . identifier[host] keyword[else] literal[string]
identifier[session] = identifier[requests] . identifier[Session] ()
keyword[if] identifier[headers] :
identifier[session] . identifier[headers] . identifier[update] ( identifier[headers] )
keyword[return] identifier[session] | def get_conn(self, headers=None):
"""
Overwrite HttpHook get_conn because this hook just needs base_url
and headers, and does not need generic params
:param headers: additional headers to be passed through as a dictionary
:type headers: dict
"""
conn = self.get_connection(self.http_conn_id)
self.base_url = conn.host if conn.host else 'https://api.opsgenie.com'
session = requests.Session()
if headers:
session.headers.update(headers) # depends on [control=['if'], data=[]]
return session |
def QA_fetch_future_list_adv(collections=DATABASE.future_list):
'''
'获取股票列表'
:param collections: mongodb 数据库
:return: DataFrame
'''
future_list_items = QA_fetch_future_list()
if len(future_list_items) == 0:
print("QA Error QA_fetch_future_list_adv call item for item in collections.find() return 0 item, maybe the DATABASE.future_list is empty!")
return None
return future_list_items | def function[QA_fetch_future_list_adv, parameter[collections]]:
constant[
'获取股票列表'
:param collections: mongodb 数据库
:return: DataFrame
]
variable[future_list_items] assign[=] call[name[QA_fetch_future_list], parameter[]]
if compare[call[name[len], parameter[name[future_list_items]]] equal[==] constant[0]] begin[:]
call[name[print], parameter[constant[QA Error QA_fetch_future_list_adv call item for item in collections.find() return 0 item, maybe the DATABASE.future_list is empty!]]]
return[constant[None]]
return[name[future_list_items]] | keyword[def] identifier[QA_fetch_future_list_adv] ( identifier[collections] = identifier[DATABASE] . identifier[future_list] ):
literal[string]
identifier[future_list_items] = identifier[QA_fetch_future_list] ()
keyword[if] identifier[len] ( identifier[future_list_items] )== literal[int] :
identifier[print] ( literal[string] )
keyword[return] keyword[None]
keyword[return] identifier[future_list_items] | def QA_fetch_future_list_adv(collections=DATABASE.future_list):
"""
'获取股票列表'
:param collections: mongodb 数据库
:return: DataFrame
"""
future_list_items = QA_fetch_future_list()
if len(future_list_items) == 0:
print('QA Error QA_fetch_future_list_adv call item for item in collections.find() return 0 item, maybe the DATABASE.future_list is empty!')
return None # depends on [control=['if'], data=[]]
return future_list_items |
def compute_topk_scores_and_seq(sequences, scores, scores_to_gather, flags,
beam_dim, prefix="default"):
"""Given sequences and scores, will gather the top k=beam size sequences.
This function is used to grow alive, and finished. It takes sequences,
scores, and flags, and returns the top k from sequences, scores_to_gather,
and flags based on the values in scores.
This method permits easy introspection using tfdbg. It adds two named ops
that are prefixed by `prefix`:
- _topk_seq: the tensor for topk_seq returned by this method.
- _topk_flags: the tensor for topk_finished_flags returned by this method.
Args:
sequences: Tensor of sequences that we need to gather from.
[batch_size, beam_size, seq_length]
scores: Tensor of scores for each sequence in sequences.
[batch_size, beam_size]. We will use these to compute the topk.
scores_to_gather: Tensor of scores for each sequence in sequences.
[batch_size, beam_size]. We will return the gathered scores from here.
Scores to gather is different from scores because for grow_alive, we will
need to return log_probs, while for grow_finished, we will need to return
the length penalized scores.
flags: Tensor of bools for sequences that say whether a sequence has reached
EOS or not
beam_dim: mtf.Dimension
prefix: an optional string
Returns:
Tuple of
(topk_seq [batch_size, beam_size, decode_length],
topk_gathered_scores [batch_size, beam_size],
topk_finished_flags[batch_size, beam_size],
selector)
"""
unused_batch_dim, old_beam_dim, unused_length_dim = sequences.shape.dims
topk_indices, _ = mtf.top_k(scores, old_beam_dim, beam_dim)
selector = mtf.one_hot(topk_indices, old_beam_dim, dtype=tf.float32)
# Gather up the highest scoring sequences.
# For each operation added, give it
# a concrete name to simplify observing these operations with tfdbg.
# Clients can capture these tensors by watching these node names.
def gather(tensor, name):
with tf.name_scope(prefix + name):
output_shape = mtf.Shape(
[beam_dim if d == old_beam_dim else d for d in tensor.shape.dims])
return mtf.gather(
tensor, topk_indices, old_beam_dim, output_shape=output_shape)
topk_seq = gather(sequences, "_seq")
topk_flags = gather(flags, "_flags")
topk_gathered_scores = gather(scores_to_gather, "_scores")
return topk_seq, topk_gathered_scores, topk_flags, selector | def function[compute_topk_scores_and_seq, parameter[sequences, scores, scores_to_gather, flags, beam_dim, prefix]]:
constant[Given sequences and scores, will gather the top k=beam size sequences.
This function is used to grow alive, and finished. It takes sequences,
scores, and flags, and returns the top k from sequences, scores_to_gather,
and flags based on the values in scores.
This method permits easy introspection using tfdbg. It adds two named ops
that are prefixed by `prefix`:
- _topk_seq: the tensor for topk_seq returned by this method.
- _topk_flags: the tensor for topk_finished_flags returned by this method.
Args:
sequences: Tensor of sequences that we need to gather from.
[batch_size, beam_size, seq_length]
scores: Tensor of scores for each sequence in sequences.
[batch_size, beam_size]. We will use these to compute the topk.
scores_to_gather: Tensor of scores for each sequence in sequences.
[batch_size, beam_size]. We will return the gathered scores from here.
Scores to gather is different from scores because for grow_alive, we will
need to return log_probs, while for grow_finished, we will need to return
the length penalized scores.
flags: Tensor of bools for sequences that say whether a sequence has reached
EOS or not
beam_dim: mtf.Dimension
prefix: an optional string
Returns:
Tuple of
(topk_seq [batch_size, beam_size, decode_length],
topk_gathered_scores [batch_size, beam_size],
topk_finished_flags[batch_size, beam_size],
selector)
]
<ast.Tuple object at 0x7da207f02350> assign[=] name[sequences].shape.dims
<ast.Tuple object at 0x7da18eb56800> assign[=] call[name[mtf].top_k, parameter[name[scores], name[old_beam_dim], name[beam_dim]]]
variable[selector] assign[=] call[name[mtf].one_hot, parameter[name[topk_indices], name[old_beam_dim]]]
def function[gather, parameter[tensor, name]]:
with call[name[tf].name_scope, parameter[binary_operation[name[prefix] + name[name]]]] begin[:]
variable[output_shape] assign[=] call[name[mtf].Shape, parameter[<ast.ListComp object at 0x7da18f00e170>]]
return[call[name[mtf].gather, parameter[name[tensor], name[topk_indices], name[old_beam_dim]]]]
variable[topk_seq] assign[=] call[name[gather], parameter[name[sequences], constant[_seq]]]
variable[topk_flags] assign[=] call[name[gather], parameter[name[flags], constant[_flags]]]
variable[topk_gathered_scores] assign[=] call[name[gather], parameter[name[scores_to_gather], constant[_scores]]]
return[tuple[[<ast.Name object at 0x7da18f00fbb0>, <ast.Name object at 0x7da18f00c370>, <ast.Name object at 0x7da18f00e740>, <ast.Name object at 0x7da18f00cc10>]]] | keyword[def] identifier[compute_topk_scores_and_seq] ( identifier[sequences] , identifier[scores] , identifier[scores_to_gather] , identifier[flags] ,
identifier[beam_dim] , identifier[prefix] = literal[string] ):
literal[string]
identifier[unused_batch_dim] , identifier[old_beam_dim] , identifier[unused_length_dim] = identifier[sequences] . identifier[shape] . identifier[dims]
identifier[topk_indices] , identifier[_] = identifier[mtf] . identifier[top_k] ( identifier[scores] , identifier[old_beam_dim] , identifier[beam_dim] )
identifier[selector] = identifier[mtf] . identifier[one_hot] ( identifier[topk_indices] , identifier[old_beam_dim] , identifier[dtype] = identifier[tf] . identifier[float32] )
keyword[def] identifier[gather] ( identifier[tensor] , identifier[name] ):
keyword[with] identifier[tf] . identifier[name_scope] ( identifier[prefix] + identifier[name] ):
identifier[output_shape] = identifier[mtf] . identifier[Shape] (
[ identifier[beam_dim] keyword[if] identifier[d] == identifier[old_beam_dim] keyword[else] identifier[d] keyword[for] identifier[d] keyword[in] identifier[tensor] . identifier[shape] . identifier[dims] ])
keyword[return] identifier[mtf] . identifier[gather] (
identifier[tensor] , identifier[topk_indices] , identifier[old_beam_dim] , identifier[output_shape] = identifier[output_shape] )
identifier[topk_seq] = identifier[gather] ( identifier[sequences] , literal[string] )
identifier[topk_flags] = identifier[gather] ( identifier[flags] , literal[string] )
identifier[topk_gathered_scores] = identifier[gather] ( identifier[scores_to_gather] , literal[string] )
keyword[return] identifier[topk_seq] , identifier[topk_gathered_scores] , identifier[topk_flags] , identifier[selector] | def compute_topk_scores_and_seq(sequences, scores, scores_to_gather, flags, beam_dim, prefix='default'):
"""Given sequences and scores, will gather the top k=beam size sequences.
This function is used to grow alive, and finished. It takes sequences,
scores, and flags, and returns the top k from sequences, scores_to_gather,
and flags based on the values in scores.
This method permits easy introspection using tfdbg. It adds two named ops
that are prefixed by `prefix`:
- _topk_seq: the tensor for topk_seq returned by this method.
- _topk_flags: the tensor for topk_finished_flags returned by this method.
Args:
sequences: Tensor of sequences that we need to gather from.
[batch_size, beam_size, seq_length]
scores: Tensor of scores for each sequence in sequences.
[batch_size, beam_size]. We will use these to compute the topk.
scores_to_gather: Tensor of scores for each sequence in sequences.
[batch_size, beam_size]. We will return the gathered scores from here.
Scores to gather is different from scores because for grow_alive, we will
need to return log_probs, while for grow_finished, we will need to return
the length penalized scores.
flags: Tensor of bools for sequences that say whether a sequence has reached
EOS or not
beam_dim: mtf.Dimension
prefix: an optional string
Returns:
Tuple of
(topk_seq [batch_size, beam_size, decode_length],
topk_gathered_scores [batch_size, beam_size],
topk_finished_flags[batch_size, beam_size],
selector)
"""
(unused_batch_dim, old_beam_dim, unused_length_dim) = sequences.shape.dims
(topk_indices, _) = mtf.top_k(scores, old_beam_dim, beam_dim)
selector = mtf.one_hot(topk_indices, old_beam_dim, dtype=tf.float32)
# Gather up the highest scoring sequences.
# For each operation added, give it
# a concrete name to simplify observing these operations with tfdbg.
# Clients can capture these tensors by watching these node names.
def gather(tensor, name):
with tf.name_scope(prefix + name):
output_shape = mtf.Shape([beam_dim if d == old_beam_dim else d for d in tensor.shape.dims])
return mtf.gather(tensor, topk_indices, old_beam_dim, output_shape=output_shape) # depends on [control=['with'], data=[]]
topk_seq = gather(sequences, '_seq')
topk_flags = gather(flags, '_flags')
topk_gathered_scores = gather(scores_to_gather, '_scores')
return (topk_seq, topk_gathered_scores, topk_flags, selector) |
def OnAddReaders(self, addedreaders):
"""Called when a reader is inserted.
Adds the smart card reader to the smartcard readers tree."""
self.mutex.acquire()
try:
parentnode = self.root
for readertoadd in addedreaders:
# is the reader already here?
found = False
(childReader, cookie) = self.GetFirstChild(parentnode)
while childReader.IsOk() and not found:
if self.GetItemText(childReader) == str(readertoadd):
found = True
else:
(childReader, cookie) = self.GetNextChild(
parentnode, cookie)
if not found:
childReader = self.AppendItem(parentnode, str(readertoadd))
self.SetPyData(childReader, readertoadd)
self.SetItemImage(
childReader,
self.readerimageindex,
wx.TreeItemIcon_Normal)
self.SetItemImage(
childReader,
self.readerimageindex,
wx.TreeItemIcon_Expanded)
self.AddATR(
childReader,
self.GetATR(readertoadd))
self.Expand(childReader)
self.Expand(self.root)
finally:
self.mutex.release()
self.EnsureVisible(self.root)
self.Repaint() | def function[OnAddReaders, parameter[self, addedreaders]]:
constant[Called when a reader is inserted.
Adds the smart card reader to the smartcard readers tree.]
call[name[self].mutex.acquire, parameter[]]
<ast.Try object at 0x7da1b1d4c550>
call[name[self].EnsureVisible, parameter[name[self].root]]
call[name[self].Repaint, parameter[]] | keyword[def] identifier[OnAddReaders] ( identifier[self] , identifier[addedreaders] ):
literal[string]
identifier[self] . identifier[mutex] . identifier[acquire] ()
keyword[try] :
identifier[parentnode] = identifier[self] . identifier[root]
keyword[for] identifier[readertoadd] keyword[in] identifier[addedreaders] :
identifier[found] = keyword[False]
( identifier[childReader] , identifier[cookie] )= identifier[self] . identifier[GetFirstChild] ( identifier[parentnode] )
keyword[while] identifier[childReader] . identifier[IsOk] () keyword[and] keyword[not] identifier[found] :
keyword[if] identifier[self] . identifier[GetItemText] ( identifier[childReader] )== identifier[str] ( identifier[readertoadd] ):
identifier[found] = keyword[True]
keyword[else] :
( identifier[childReader] , identifier[cookie] )= identifier[self] . identifier[GetNextChild] (
identifier[parentnode] , identifier[cookie] )
keyword[if] keyword[not] identifier[found] :
identifier[childReader] = identifier[self] . identifier[AppendItem] ( identifier[parentnode] , identifier[str] ( identifier[readertoadd] ))
identifier[self] . identifier[SetPyData] ( identifier[childReader] , identifier[readertoadd] )
identifier[self] . identifier[SetItemImage] (
identifier[childReader] ,
identifier[self] . identifier[readerimageindex] ,
identifier[wx] . identifier[TreeItemIcon_Normal] )
identifier[self] . identifier[SetItemImage] (
identifier[childReader] ,
identifier[self] . identifier[readerimageindex] ,
identifier[wx] . identifier[TreeItemIcon_Expanded] )
identifier[self] . identifier[AddATR] (
identifier[childReader] ,
identifier[self] . identifier[GetATR] ( identifier[readertoadd] ))
identifier[self] . identifier[Expand] ( identifier[childReader] )
identifier[self] . identifier[Expand] ( identifier[self] . identifier[root] )
keyword[finally] :
identifier[self] . identifier[mutex] . identifier[release] ()
identifier[self] . identifier[EnsureVisible] ( identifier[self] . identifier[root] )
identifier[self] . identifier[Repaint] () | def OnAddReaders(self, addedreaders):
"""Called when a reader is inserted.
Adds the smart card reader to the smartcard readers tree."""
self.mutex.acquire()
try:
parentnode = self.root
for readertoadd in addedreaders:
# is the reader already here?
found = False
(childReader, cookie) = self.GetFirstChild(parentnode)
while childReader.IsOk() and (not found):
if self.GetItemText(childReader) == str(readertoadd):
found = True # depends on [control=['if'], data=[]]
else:
(childReader, cookie) = self.GetNextChild(parentnode, cookie) # depends on [control=['while'], data=[]]
if not found:
childReader = self.AppendItem(parentnode, str(readertoadd))
self.SetPyData(childReader, readertoadd)
self.SetItemImage(childReader, self.readerimageindex, wx.TreeItemIcon_Normal)
self.SetItemImage(childReader, self.readerimageindex, wx.TreeItemIcon_Expanded)
self.AddATR(childReader, self.GetATR(readertoadd))
self.Expand(childReader) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['readertoadd']]
self.Expand(self.root) # depends on [control=['try'], data=[]]
finally:
self.mutex.release()
self.EnsureVisible(self.root)
self.Repaint() |
def sync_matchers(saltenv=None, refresh=False, extmod_whitelist=None, extmod_blacklist=None):
'''
.. versionadded:: 2019.2.0
Sync engine modules from ``salt://_matchers`` to the minion
saltenv
The fileserver environment from which to sync. To sync from more than
one environment, pass a comma-separated list.
If not passed, then all environments configured in the :ref:`top files
<states-top>` will be checked for engines to sync. If no top files are
found, then the ``base`` environment will be synced.
refresh : True
If ``True``, refresh the available execution modules on the minion.
This refresh will be performed even if no new matcher modules are synced.
Set to ``False`` to prevent this refresh.
extmod_whitelist : None
comma-separated list of modules to sync
extmod_blacklist : None
comma-separated list of modules to blacklist based on type
CLI Examples:
.. code-block:: bash
salt '*' saltutil.sync_matchers
salt '*' saltutil.sync_matchers saltenv=base,dev
'''
ret = _sync('matchers', saltenv, extmod_whitelist, extmod_blacklist)
if refresh:
refresh_modules()
return ret | def function[sync_matchers, parameter[saltenv, refresh, extmod_whitelist, extmod_blacklist]]:
constant[
.. versionadded:: 2019.2.0
Sync engine modules from ``salt://_matchers`` to the minion
saltenv
The fileserver environment from which to sync. To sync from more than
one environment, pass a comma-separated list.
If not passed, then all environments configured in the :ref:`top files
<states-top>` will be checked for engines to sync. If no top files are
found, then the ``base`` environment will be synced.
refresh : True
If ``True``, refresh the available execution modules on the minion.
This refresh will be performed even if no new matcher modules are synced.
Set to ``False`` to prevent this refresh.
extmod_whitelist : None
comma-separated list of modules to sync
extmod_blacklist : None
comma-separated list of modules to blacklist based on type
CLI Examples:
.. code-block:: bash
salt '*' saltutil.sync_matchers
salt '*' saltutil.sync_matchers saltenv=base,dev
]
variable[ret] assign[=] call[name[_sync], parameter[constant[matchers], name[saltenv], name[extmod_whitelist], name[extmod_blacklist]]]
if name[refresh] begin[:]
call[name[refresh_modules], parameter[]]
return[name[ret]] | keyword[def] identifier[sync_matchers] ( identifier[saltenv] = keyword[None] , identifier[refresh] = keyword[False] , identifier[extmod_whitelist] = keyword[None] , identifier[extmod_blacklist] = keyword[None] ):
literal[string]
identifier[ret] = identifier[_sync] ( literal[string] , identifier[saltenv] , identifier[extmod_whitelist] , identifier[extmod_blacklist] )
keyword[if] identifier[refresh] :
identifier[refresh_modules] ()
keyword[return] identifier[ret] | def sync_matchers(saltenv=None, refresh=False, extmod_whitelist=None, extmod_blacklist=None):
"""
.. versionadded:: 2019.2.0
Sync engine modules from ``salt://_matchers`` to the minion
saltenv
The fileserver environment from which to sync. To sync from more than
one environment, pass a comma-separated list.
If not passed, then all environments configured in the :ref:`top files
<states-top>` will be checked for engines to sync. If no top files are
found, then the ``base`` environment will be synced.
refresh : True
If ``True``, refresh the available execution modules on the minion.
This refresh will be performed even if no new matcher modules are synced.
Set to ``False`` to prevent this refresh.
extmod_whitelist : None
comma-separated list of modules to sync
extmod_blacklist : None
comma-separated list of modules to blacklist based on type
CLI Examples:
.. code-block:: bash
salt '*' saltutil.sync_matchers
salt '*' saltutil.sync_matchers saltenv=base,dev
"""
ret = _sync('matchers', saltenv, extmod_whitelist, extmod_blacklist)
if refresh:
refresh_modules() # depends on [control=['if'], data=[]]
return ret |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.