code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def matrix_at_check(self, original, loc, tokens):
    """Flag a use of the Python 3.5 matrix-multiplication operator.

    Delegates to ``check_py`` with the target version and feature name.
    """
    version, feature = "35", "matrix multiplication"
    return self.check_py(version, feature, original, loc, tokens)
|
def function[matrix_at_check, parameter[self, original, loc, tokens]]:
constant[Check for Python 3.5 matrix multiplication.]
return[call[name[self].check_py, parameter[constant[35], constant[matrix multiplication], name[original], name[loc], name[tokens]]]]
|
keyword[def] identifier[matrix_at_check] ( identifier[self] , identifier[original] , identifier[loc] , identifier[tokens] ):
literal[string]
keyword[return] identifier[self] . identifier[check_py] ( literal[string] , literal[string] , identifier[original] , identifier[loc] , identifier[tokens] )
|
def matrix_at_check(self, original, loc, tokens):
    """Report Python 3.5 matrix multiplication via the generic version check."""
    return self.check_py(
        '35',
        'matrix multiplication',
        original,
        loc,
        tokens,
    )
|
def manipulate(self, stored_instance, component_instance):
    """
    Manipulates the component instance
    :param stored_instance: The iPOPO component StoredInstance
    :param component_instance: The component instance
    """
    # Keep a handle on the component StoredInstance
    self._ipopo_instance = stored_instance
    controller = self.__controller
    if controller is None:
        # Without a controller field there is nothing to inject
        return
    # Read the member's current value (defaults to True when absent)
    # and record it in the stored instance
    value = getattr(component_instance, controller, True)
    stored_instance.set_controller_state(controller, value)
    # Build the names of the generated accessors
    prefix = ipopo_constants.IPOPO_CONTROLLER_PREFIX
    getter_name = "{0}{1}".format(prefix, ipopo_constants.IPOPO_GETTER_SUFFIX)
    setter_name = "{0}{1}".format(prefix, ipopo_constants.IPOPO_SETTER_SUFFIX)
    # Generate the accessors and attach them directly on the instance
    getter, setter = self._field_controller_generator()
    setattr(component_instance, getter_name, getter)
    setattr(component_instance, setter_name, setter)
|
def function[manipulate, parameter[self, stored_instance, component_instance]]:
constant[
Manipulates the component instance
:param stored_instance: The iPOPO component StoredInstance
:param component_instance: The component instance
]
name[self]._ipopo_instance assign[=] name[stored_instance]
if compare[name[self].__controller is constant[None]] begin[:]
return[None]
variable[controller_value] assign[=] call[name[getattr], parameter[name[component_instance], name[self].__controller, constant[True]]]
call[name[stored_instance].set_controller_state, parameter[name[self].__controller, name[controller_value]]]
variable[getter_name] assign[=] call[constant[{0}{1}].format, parameter[name[ipopo_constants].IPOPO_CONTROLLER_PREFIX, name[ipopo_constants].IPOPO_GETTER_SUFFIX]]
variable[setter_name] assign[=] call[constant[{0}{1}].format, parameter[name[ipopo_constants].IPOPO_CONTROLLER_PREFIX, name[ipopo_constants].IPOPO_SETTER_SUFFIX]]
<ast.Tuple object at 0x7da1b04d4640> assign[=] call[name[self]._field_controller_generator, parameter[]]
call[name[setattr], parameter[name[component_instance], name[getter_name], name[getter]]]
call[name[setattr], parameter[name[component_instance], name[setter_name], name[setter]]]
|
keyword[def] identifier[manipulate] ( identifier[self] , identifier[stored_instance] , identifier[component_instance] ):
literal[string]
identifier[self] . identifier[_ipopo_instance] = identifier[stored_instance]
keyword[if] identifier[self] . identifier[__controller] keyword[is] keyword[None] :
keyword[return]
identifier[controller_value] = identifier[getattr] ( identifier[component_instance] , identifier[self] . identifier[__controller] , keyword[True] )
identifier[stored_instance] . identifier[set_controller_state] (
identifier[self] . identifier[__controller] , identifier[controller_value]
)
identifier[getter_name] = literal[string] . identifier[format] (
identifier[ipopo_constants] . identifier[IPOPO_CONTROLLER_PREFIX] ,
identifier[ipopo_constants] . identifier[IPOPO_GETTER_SUFFIX] ,
)
identifier[setter_name] = literal[string] . identifier[format] (
identifier[ipopo_constants] . identifier[IPOPO_CONTROLLER_PREFIX] ,
identifier[ipopo_constants] . identifier[IPOPO_SETTER_SUFFIX] ,
)
identifier[getter] , identifier[setter] = identifier[self] . identifier[_field_controller_generator] ()
identifier[setattr] ( identifier[component_instance] , identifier[getter_name] , identifier[getter] )
identifier[setattr] ( identifier[component_instance] , identifier[setter_name] , identifier[setter] )
|
def manipulate(self, stored_instance, component_instance):
    """
    Manipulates the component instance
    :param stored_instance: The iPOPO component StoredInstance
    :param component_instance: The component instance
    """
    # Remember the StoredInstance wrapper
    self._ipopo_instance = stored_instance
    controller = self.__controller
    if controller is None:
        # No controller configured: leave the instance untouched
        return
    # Record the member's current value (True when not yet set)
    stored_instance.set_controller_state(
        controller, getattr(component_instance, controller, True)
    )
    # Accessor names share the controller prefix
    names = [
        '{0}{1}'.format(ipopo_constants.IPOPO_CONTROLLER_PREFIX, suffix)
        for suffix in (
            ipopo_constants.IPOPO_GETTER_SUFFIX,
            ipopo_constants.IPOPO_SETTER_SUFFIX,
        )
    ]
    # Pair each generated accessor with its name and inject it
    for name, accessor in zip(names, self._field_controller_generator()):
        setattr(component_instance, name, accessor)
|
def _get_hosts_from_ports(self, ports):
""" validate hostnames from a list of ports
"""
hosts = map(lambda x: 'localhost:%d' % int(x.strip()), ports.split(','))
return list(set(hosts))
|
def function[_get_hosts_from_ports, parameter[self, ports]]:
constant[ validate hostnames from a list of ports
]
variable[hosts] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da18f09fdf0>, call[name[ports].split, parameter[constant[,]]]]]
return[call[name[list], parameter[call[name[set], parameter[name[hosts]]]]]]
|
keyword[def] identifier[_get_hosts_from_ports] ( identifier[self] , identifier[ports] ):
literal[string]
identifier[hosts] = identifier[map] ( keyword[lambda] identifier[x] : literal[string] % identifier[int] ( identifier[x] . identifier[strip] ()), identifier[ports] . identifier[split] ( literal[string] ))
keyword[return] identifier[list] ( identifier[set] ( identifier[hosts] ))
|
def _get_hosts_from_ports(self, ports):
""" validate hostnames from a list of ports
"""
hosts = map(lambda x: 'localhost:%d' % int(x.strip()), ports.split(','))
return list(set(hosts))
|
def to_ini(self):
    """ Get the ini string of the current parser.
    :return: The ini string of the current parser
    :rtype: str
    """
    # Write into an in-memory text buffer and hand back its contents.
    with io.StringIO() as sink:
        self.write(sink)
        return sink.getvalue()
|
def function[to_ini, parameter[self]]:
constant[ Get the ini string of the current parser.
:return: The ini string of the current parser
:rtype: str
]
variable[fake_io] assign[=] call[name[io].StringIO, parameter[]]
call[name[self].write, parameter[name[fake_io]]]
return[call[name[fake_io].getvalue, parameter[]]]
|
keyword[def] identifier[to_ini] ( identifier[self] ):
literal[string]
identifier[fake_io] = identifier[io] . identifier[StringIO] ()
identifier[self] . identifier[write] ( identifier[fake_io] )
keyword[return] identifier[fake_io] . identifier[getvalue] ()
|
def to_ini(self):
    """ Get the ini string of the current parser.
    :return: The ini string of the current parser
    :rtype: str
    """
    out = io.StringIO()
    # The parser serializes itself through the file-like protocol.
    self.write(out)
    contents = out.getvalue()
    return contents
|
def plot_gender(data, options):
    """Plots the gender.
    :param data: the data to plot.
    :param options: the options.
    :type data: numpy.recarray
    :type options: argparse.Namespace
    Plots the summarized intensities of the markers on the Y chromosomes in
    function of the markers on the X chromosomes, with problematic samples with
    different colors.
    Also uses :py:func:`print_data_to_file` to save the data, so that it is
    faster to rerun the analysis.
    """
    if data is None:
        # there is a problem...
        msg = ("no data: specify either '--bfile' and '--intensities', or "
               "'--summarized-intensities'")
        raise ProgramError(msg)
    import matplotlib as mpl
    if options.format != "X11" and mpl.get_backend() != "agg":
        mpl.use("Agg")
    import matplotlib.pyplot as plt
    if options.format != "X11":
        plt.ioff()
    # The figure and axes
    fig = plt.figure()
    fig.subplots_adjust(top=0.84)
    ax = fig.add_subplot(111)
    # Changing the spines
    ax.xaxis.set_ticks_position("bottom")
    ax.yaxis.set_ticks_position("left")
    ax.spines["top"].set_visible(False)
    ax.spines["right"].set_visible(False)
    # Setting the axis labels
    ax.set_xlabel(options.xlabel)
    ax.set_ylabel(options.ylabel)
    # For the legend
    plot_object = []
    labels = []

    def _plot_group(gender, status, marker, ms, mec, mfc, label_tmpl, suffix):
        """Plot one gender/status subgroup and record its legend entry.
        Also dumps the subgroup's data to '<out>.<suffix>.txt', unless the
        summarized intensities were given on the command line.
        """
        mask = np.logical_and(data["gender"] == gender,
                              data["status"] == status)
        tmp, = ax.plot(data["chr23"][mask], data["chr24"][mask], marker,
                       ms=ms, mec=mec, mfc=mfc)
        plot_object.append(tmp)
        labels.append(label_tmpl.format(sum(mask)))
        if options.summarized_intensities is None:
            print_data_to_file(data[mask],
                               "{}.{}.txt".format(options.out, suffix))

    # The six subgroups, plotted in the same order as before so the legend
    # entries keep their original positions.
    _plot_group("Male", "OK", "o", 5, "#0099CC", "#0099CC",
                "OK Males (n={})", "ok_males")
    _plot_group("Female", "OK", "o", 5, "#CC0000", "#CC0000",
                "OK Females (n={})", "ok_females")
    _plot_group("Unknown", "OK", "o", 5, "#555555", "#555555",
                "OK Unknowns (n={})", "ok_unknowns")
    _plot_group("Male", "Problem", "^", 6, "#000000", "#669900",
                "Problematic Males (n={})", "problematic_males")
    _plot_group("Female", "Problem", "v", 6, "#000000", "#9933CC",
                "Problematic Females (n={})", "problematic_females")
    _plot_group("Unknown", "Problem", ">", 6, "#000000", "#555555",
                "Problematic Unknown (n={})", "problematic_unknowns")
    # the legend, laid out above the axes
    prop = mpl.font_manager.FontProperties(size=10)
    ax.legend(plot_object, labels, loc=8, numpoints=1, fancybox=True,
              prop=prop, ncol=2, bbox_to_anchor=(0., 1.02, 1., .102),
              borderaxespad=0.)
    # Setting the limits (small margin so points don't sit on the spines)
    xlim = ax.get_xlim()
    ax.set_xlim((xlim[0]-0.01, xlim[1]+0.01))
    ylim = ax.get_ylim()
    ax.set_ylim((ylim[0]-0.01, ylim[1]+0.01))
    if options.format == "X11":
        plt.show()
    else:
        file_name = "{}.{}".format(options.out, options.format)
        try:
            plt.savefig(file_name)
        except IOError:
            msg = "{}: can't write file".format(file_name)
            raise ProgramError(msg)
|
def function[plot_gender, parameter[data, options]]:
constant[Plots the gender.
:param data: the data to plot.
:param options: the options.
:type data: numpy.recarray
:type options: argparse.Namespace
Plots the summarized intensities of the markers on the Y chromosomes in
function of the markers on the X chromosomes, with problematic samples with
different colors.
Also uses :py:func:`print_data_to_file` to save the data, so that it is
faster to rerun the analysis.
]
if compare[name[data] is constant[None]] begin[:]
variable[msg] assign[=] constant[no data: specify either '--bfile' and '--intensities', or '--summarized-intensities']
<ast.Raise object at 0x7da1b0a01900>
import module[matplotlib] as alias[mpl]
if <ast.BoolOp object at 0x7da1b0a01780> begin[:]
call[name[mpl].use, parameter[constant[Agg]]]
import module[matplotlib.pyplot] as alias[plt]
if compare[name[options].format not_equal[!=] constant[X11]] begin[:]
call[name[plt].ioff, parameter[]]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
call[name[fig].subplots_adjust, parameter[]]
variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
call[name[ax].xaxis.set_ticks_position, parameter[constant[bottom]]]
call[name[ax].yaxis.set_ticks_position, parameter[constant[left]]]
call[call[name[ax].spines][constant[top]].set_visible, parameter[constant[False]]]
call[call[name[ax].spines][constant[right]].set_visible, parameter[constant[False]]]
call[name[ax].set_xlabel, parameter[name[options].xlabel]]
call[name[ax].set_ylabel, parameter[name[options].ylabel]]
variable[plot_object] assign[=] list[[]]
variable[labels] assign[=] list[[]]
variable[males] assign[=] call[name[np].logical_and, parameter[compare[call[name[data]][constant[gender]] equal[==] constant[Male]], compare[call[name[data]][constant[status]] equal[==] constant[OK]]]]
<ast.Tuple object at 0x7da1b0a001f0> assign[=] call[name[ax].plot, parameter[call[call[name[data]][constant[chr23]]][name[males]], call[call[name[data]][constant[chr24]]][name[males]], constant[o]]]
call[name[plot_object].append, parameter[name[tmp]]]
call[name[labels].append, parameter[call[constant[OK Males (n={})].format, parameter[call[name[sum], parameter[name[males]]]]]]]
if compare[name[options].summarized_intensities is constant[None]] begin[:]
call[name[print_data_to_file], parameter[call[name[data]][name[males]], call[constant[{}.ok_males.txt].format, parameter[name[options].out]]]]
variable[females] assign[=] call[name[np].logical_and, parameter[compare[call[name[data]][constant[gender]] equal[==] constant[Female]], compare[call[name[data]][constant[status]] equal[==] constant[OK]]]]
<ast.Tuple object at 0x7da204566650> assign[=] call[name[ax].plot, parameter[call[call[name[data]][constant[chr23]]][name[females]], call[call[name[data]][constant[chr24]]][name[females]], constant[o]]]
call[name[plot_object].append, parameter[name[tmp]]]
call[name[labels].append, parameter[call[constant[OK Females (n={})].format, parameter[call[name[sum], parameter[name[females]]]]]]]
if compare[name[options].summarized_intensities is constant[None]] begin[:]
call[name[print_data_to_file], parameter[call[name[data]][name[females]], call[constant[{}.ok_females.txt].format, parameter[name[options].out]]]]
variable[unknowns] assign[=] call[name[np].logical_and, parameter[compare[call[name[data]][constant[gender]] equal[==] constant[Unknown]], compare[call[name[data]][constant[status]] equal[==] constant[OK]]]]
<ast.Tuple object at 0x7da1b0ac61d0> assign[=] call[name[ax].plot, parameter[call[call[name[data]][constant[chr23]]][name[unknowns]], call[call[name[data]][constant[chr24]]][name[unknowns]], constant[o]]]
call[name[plot_object].append, parameter[name[tmp]]]
call[name[labels].append, parameter[call[constant[OK Unknowns (n={})].format, parameter[call[name[sum], parameter[name[unknowns]]]]]]]
if compare[name[options].summarized_intensities is constant[None]] begin[:]
call[name[print_data_to_file], parameter[call[name[data]][name[unknowns]], call[constant[{}.ok_unknowns.txt].format, parameter[name[options].out]]]]
variable[males] assign[=] call[name[np].logical_and, parameter[compare[call[name[data]][constant[gender]] equal[==] constant[Male]], compare[call[name[data]][constant[status]] equal[==] constant[Problem]]]]
<ast.Tuple object at 0x7da1b0ac5450> assign[=] call[name[ax].plot, parameter[call[call[name[data]][constant[chr23]]][name[males]], call[call[name[data]][constant[chr24]]][name[males]], constant[^]]]
call[name[plot_object].append, parameter[name[tmp]]]
call[name[labels].append, parameter[call[constant[Problematic Males (n={})].format, parameter[call[name[sum], parameter[name[males]]]]]]]
if compare[name[options].summarized_intensities is constant[None]] begin[:]
call[name[print_data_to_file], parameter[call[name[data]][name[males]], call[constant[{}.problematic_males.txt].format, parameter[name[options].out]]]]
variable[females] assign[=] call[name[np].logical_and, parameter[compare[call[name[data]][constant[gender]] equal[==] constant[Female]], compare[call[name[data]][constant[status]] equal[==] constant[Problem]]]]
<ast.Tuple object at 0x7da1b0ac46d0> assign[=] call[name[ax].plot, parameter[call[call[name[data]][constant[chr23]]][name[females]], call[call[name[data]][constant[chr24]]][name[females]], constant[v]]]
call[name[plot_object].append, parameter[name[tmp]]]
call[name[labels].append, parameter[call[constant[Problematic Females (n={})].format, parameter[call[name[sum], parameter[name[females]]]]]]]
if compare[name[options].summarized_intensities is constant[None]] begin[:]
call[name[print_data_to_file], parameter[call[name[data]][name[females]], call[constant[{}.problematic_females.txt].format, parameter[name[options].out]]]]
variable[unknowns] assign[=] call[name[np].logical_and, parameter[compare[call[name[data]][constant[gender]] equal[==] constant[Unknown]], compare[call[name[data]][constant[status]] equal[==] constant[Problem]]]]
<ast.Tuple object at 0x7da1b0a64f70> assign[=] call[name[ax].plot, parameter[call[call[name[data]][constant[chr23]]][name[unknowns]], call[call[name[data]][constant[chr24]]][name[unknowns]], constant[>]]]
call[name[plot_object].append, parameter[name[tmp]]]
call[name[labels].append, parameter[call[constant[Problematic Unknown (n={})].format, parameter[call[name[sum], parameter[name[unknowns]]]]]]]
if compare[name[options].summarized_intensities is constant[None]] begin[:]
call[name[print_data_to_file], parameter[call[name[data]][name[unknowns]], call[constant[{}.problematic_unknowns.txt].format, parameter[name[options].out]]]]
variable[prop] assign[=] call[name[mpl].font_manager.FontProperties, parameter[]]
variable[leg] assign[=] call[name[ax].legend, parameter[name[plot_object], name[labels]]]
variable[xlim] assign[=] call[name[ax].get_xlim, parameter[]]
call[name[ax].set_xlim, parameter[tuple[[<ast.BinOp object at 0x7da1b0a66710>, <ast.BinOp object at 0x7da207f00820>]]]]
variable[ylim] assign[=] call[name[ax].get_ylim, parameter[]]
call[name[ax].set_ylim, parameter[tuple[[<ast.BinOp object at 0x7da1b0972c80>, <ast.BinOp object at 0x7da1b09719c0>]]]]
if compare[name[options].format equal[==] constant[X11]] begin[:]
call[name[plt].show, parameter[]]
|
keyword[def] identifier[plot_gender] ( identifier[data] , identifier[options] ):
literal[string]
keyword[if] identifier[data] keyword[is] keyword[None] :
identifier[msg] =( literal[string]
literal[string] )
keyword[raise] identifier[ProgramError] ( identifier[msg] )
keyword[import] identifier[matplotlib] keyword[as] identifier[mpl]
keyword[if] identifier[options] . identifier[format] != literal[string] keyword[and] identifier[mpl] . identifier[get_backend] ()!= literal[string] :
identifier[mpl] . identifier[use] ( literal[string] )
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
keyword[if] identifier[options] . identifier[format] != literal[string] :
identifier[plt] . identifier[ioff] ()
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[fig] . identifier[subplots_adjust] ( identifier[top] = literal[int] )
identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] )
identifier[ax] . identifier[xaxis] . identifier[set_ticks_position] ( literal[string] )
identifier[ax] . identifier[yaxis] . identifier[set_ticks_position] ( literal[string] )
identifier[ax] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] )
identifier[ax] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] )
identifier[ax] . identifier[set_xlabel] ( identifier[options] . identifier[xlabel] )
identifier[ax] . identifier[set_ylabel] ( identifier[options] . identifier[ylabel] )
identifier[plot_object] =[]
identifier[labels] =[]
identifier[males] = identifier[np] . identifier[logical_and] ( identifier[data] [ literal[string] ]== literal[string] , identifier[data] [ literal[string] ]== literal[string] )
identifier[tmp] ,= identifier[ax] . identifier[plot] ( identifier[data] [ literal[string] ][ identifier[males] ], identifier[data] [ literal[string] ][ identifier[males] ], literal[string] , identifier[ms] = literal[int] ,
identifier[mec] = literal[string] , identifier[mfc] = literal[string] )
identifier[plot_object] . identifier[append] ( identifier[tmp] )
identifier[labels] . identifier[append] ( literal[string] . identifier[format] ( identifier[sum] ( identifier[males] )))
keyword[if] identifier[options] . identifier[summarized_intensities] keyword[is] keyword[None] :
identifier[print_data_to_file] ( identifier[data] [ identifier[males] ], literal[string] . identifier[format] ( identifier[options] . identifier[out] ))
identifier[females] = identifier[np] . identifier[logical_and] ( identifier[data] [ literal[string] ]== literal[string] ,
identifier[data] [ literal[string] ]== literal[string] )
identifier[tmp] ,= identifier[ax] . identifier[plot] ( identifier[data] [ literal[string] ][ identifier[females] ], identifier[data] [ literal[string] ][ identifier[females] ], literal[string] , identifier[ms] = literal[int] ,
identifier[mec] = literal[string] , identifier[mfc] = literal[string] )
identifier[plot_object] . identifier[append] ( identifier[tmp] )
identifier[labels] . identifier[append] ( literal[string] . identifier[format] ( identifier[sum] ( identifier[females] )))
keyword[if] identifier[options] . identifier[summarized_intensities] keyword[is] keyword[None] :
identifier[print_data_to_file] ( identifier[data] [ identifier[females] ],
literal[string] . identifier[format] ( identifier[options] . identifier[out] ))
identifier[unknowns] = identifier[np] . identifier[logical_and] ( identifier[data] [ literal[string] ]== literal[string] ,
identifier[data] [ literal[string] ]== literal[string] )
identifier[tmp] ,= identifier[ax] . identifier[plot] ( identifier[data] [ literal[string] ][ identifier[unknowns] ], identifier[data] [ literal[string] ][ identifier[unknowns] ], literal[string] , identifier[ms] = literal[int] ,
identifier[mec] = literal[string] , identifier[mfc] = literal[string] )
identifier[plot_object] . identifier[append] ( identifier[tmp] )
identifier[labels] . identifier[append] ( literal[string] . identifier[format] ( identifier[sum] ( identifier[unknowns] )))
keyword[if] identifier[options] . identifier[summarized_intensities] keyword[is] keyword[None] :
identifier[print_data_to_file] ( identifier[data] [ identifier[unknowns] ],
literal[string] . identifier[format] ( identifier[options] . identifier[out] ))
identifier[males] = identifier[np] . identifier[logical_and] ( identifier[data] [ literal[string] ]== literal[string] ,
identifier[data] [ literal[string] ]== literal[string] )
identifier[tmp] ,= identifier[ax] . identifier[plot] ( identifier[data] [ literal[string] ][ identifier[males] ], identifier[data] [ literal[string] ][ identifier[males] ], literal[string] , identifier[ms] = literal[int] ,
identifier[mec] = literal[string] , identifier[mfc] = literal[string] )
identifier[plot_object] . identifier[append] ( identifier[tmp] )
identifier[labels] . identifier[append] ( literal[string] . identifier[format] ( identifier[sum] ( identifier[males] )))
keyword[if] identifier[options] . identifier[summarized_intensities] keyword[is] keyword[None] :
identifier[print_data_to_file] ( identifier[data] [ identifier[males] ],
literal[string] . identifier[format] ( identifier[options] . identifier[out] ))
identifier[females] = identifier[np] . identifier[logical_and] ( identifier[data] [ literal[string] ]== literal[string] ,
identifier[data] [ literal[string] ]== literal[string] )
identifier[tmp] ,= identifier[ax] . identifier[plot] ( identifier[data] [ literal[string] ][ identifier[females] ], identifier[data] [ literal[string] ][ identifier[females] ], literal[string] , identifier[ms] = literal[int] ,
identifier[mec] = literal[string] , identifier[mfc] = literal[string] )
identifier[plot_object] . identifier[append] ( identifier[tmp] )
identifier[labels] . identifier[append] ( literal[string] . identifier[format] ( identifier[sum] ( identifier[females] )))
keyword[if] identifier[options] . identifier[summarized_intensities] keyword[is] keyword[None] :
identifier[print_data_to_file] ( identifier[data] [ identifier[females] ],
literal[string] . identifier[format] ( identifier[options] . identifier[out] ))
identifier[unknowns] = identifier[np] . identifier[logical_and] ( identifier[data] [ literal[string] ]== literal[string] ,
identifier[data] [ literal[string] ]== literal[string] )
identifier[tmp] ,= identifier[ax] . identifier[plot] ( identifier[data] [ literal[string] ][ identifier[unknowns] ], identifier[data] [ literal[string] ][ identifier[unknowns] ], literal[string] , identifier[ms] = literal[int] ,
identifier[mec] = literal[string] , identifier[mfc] = literal[string] )
identifier[plot_object] . identifier[append] ( identifier[tmp] )
identifier[labels] . identifier[append] ( literal[string] . identifier[format] ( identifier[sum] ( identifier[unknowns] )))
keyword[if] identifier[options] . identifier[summarized_intensities] keyword[is] keyword[None] :
identifier[print_data_to_file] ( identifier[data] [ identifier[unknowns] ],
literal[string] . identifier[format] ( identifier[options] . identifier[out] ))
identifier[prop] = identifier[mpl] . identifier[font_manager] . identifier[FontProperties] ( identifier[size] = literal[int] )
identifier[leg] = identifier[ax] . identifier[legend] ( identifier[plot_object] , identifier[labels] , identifier[loc] = literal[int] , identifier[numpoints] = literal[int] , identifier[fancybox] = keyword[True] ,
identifier[prop] = identifier[prop] , identifier[ncol] = literal[int] , identifier[bbox_to_anchor] =( literal[int] , literal[int] , literal[int] , literal[int] ),
identifier[borderaxespad] = literal[int] )
identifier[xlim] = identifier[ax] . identifier[get_xlim] ()
identifier[ax] . identifier[set_xlim] (( identifier[xlim] [ literal[int] ]- literal[int] , identifier[xlim] [ literal[int] ]+ literal[int] ))
identifier[ylim] = identifier[ax] . identifier[get_ylim] ()
identifier[ax] . identifier[set_ylim] (( identifier[ylim] [ literal[int] ]- literal[int] , identifier[ylim] [ literal[int] ]+ literal[int] ))
keyword[if] identifier[options] . identifier[format] == literal[string] :
identifier[plt] . identifier[show] ()
keyword[else] :
identifier[file_name] = literal[string] . identifier[format] ( identifier[options] . identifier[out] , identifier[options] . identifier[format] )
keyword[try] :
identifier[plt] . identifier[savefig] ( identifier[file_name] )
keyword[except] identifier[IOError] :
identifier[msg] = literal[string] . identifier[format] ( identifier[file_name] )
keyword[raise] identifier[ProgramError] ( identifier[msg] )
|
def plot_gender(data, options):
    """Plots the gender.
    :param data: the data to plot.
    :param options: the options.
    :type data: numpy.recarray
    :type options: argparse.Namespace
    Plots the summarized intensities of the markers on the Y chromosomes in
    function of the markers on the X chromosomes, with problematic samples with
    different colors.
    Also uses :py:func:`print_data_to_file` to save the data, so that it is
    faster to rerun the analysis.
    """
    if data is None:
        # there is a problem...
        msg = "no data: specify either '--bfile' and '--intensities', or '--summarized-intensities'"
        raise ProgramError(msg)
    import matplotlib as mpl
    if options.format != 'X11' and mpl.get_backend() != 'agg':
        mpl.use('Agg')
    import matplotlib.pyplot as plt
    if options.format != 'X11':
        plt.ioff()
    # The figure and axes
    fig = plt.figure()
    fig.subplots_adjust(top=0.84)
    ax = fig.add_subplot(111)
    # Changing the spines
    ax.xaxis.set_ticks_position('bottom')
    ax.yaxis.set_ticks_position('left')
    ax.spines['top'].set_visible(False)
    ax.spines['right'].set_visible(False)
    # Setting the axis labels
    ax.set_xlabel(options.xlabel)
    ax.set_ylabel(options.ylabel)
    # For the legend
    plot_object = []
    labels = []

    def _plot_group(gender, status, marker, ms, mec, mfc, label_tmpl, suffix):
        """Plot one gender/status subgroup and record its legend entry.
        Also dumps the subgroup's data to '<out>.<suffix>.txt', unless the
        summarized intensities were given on the command line.
        """
        mask = np.logical_and(data['gender'] == gender,
                              data['status'] == status)
        (tmp,) = ax.plot(data['chr23'][mask], data['chr24'][mask], marker,
                         ms=ms, mec=mec, mfc=mfc)
        plot_object.append(tmp)
        labels.append(label_tmpl.format(sum(mask)))
        if options.summarized_intensities is None:
            print_data_to_file(data[mask],
                               '{}.{}.txt'.format(options.out, suffix))

    # The six subgroups, in the same order as before so legend entries
    # keep their original positions.
    _plot_group('Male', 'OK', 'o', 5, '#0099CC', '#0099CC',
                'OK Males (n={})', 'ok_males')
    _plot_group('Female', 'OK', 'o', 5, '#CC0000', '#CC0000',
                'OK Females (n={})', 'ok_females')
    _plot_group('Unknown', 'OK', 'o', 5, '#555555', '#555555',
                'OK Unknowns (n={})', 'ok_unknowns')
    _plot_group('Male', 'Problem', '^', 6, '#000000', '#669900',
                'Problematic Males (n={})', 'problematic_males')
    _plot_group('Female', 'Problem', 'v', 6, '#000000', '#9933CC',
                'Problematic Females (n={})', 'problematic_females')
    _plot_group('Unknown', 'Problem', '>', 6, '#000000', '#555555',
                'Problematic Unknown (n={})', 'problematic_unknowns')
    # the legend, laid out above the axes
    prop = mpl.font_manager.FontProperties(size=10)
    ax.legend(plot_object, labels, loc=8, numpoints=1, fancybox=True,
              prop=prop, ncol=2, bbox_to_anchor=(0.0, 1.02, 1.0, 0.102),
              borderaxespad=0.0)
    # Setting the limits (small margin so points don't sit on the spines)
    xlim = ax.get_xlim()
    ax.set_xlim((xlim[0] - 0.01, xlim[1] + 0.01))
    ylim = ax.get_ylim()
    ax.set_ylim((ylim[0] - 0.01, ylim[1] + 0.01))
    if options.format == 'X11':
        plt.show()
    else:
        file_name = '{}.{}'.format(options.out, options.format)
        try:
            plt.savefig(file_name)
        except IOError:
            msg = "{}: can't write file".format(file_name)
            raise ProgramError(msg)
|
def _add_node(self, node):
    """Register a node in the graph and assign it a fresh integer ID.

    Args:
        node: An instance of Node.

    Returns:
        The integer ID assigned to ``node`` (its index in ``node_list``).
    """
    # IDs are dense indices: the next ID equals the current node count.
    fresh_id = len(self.node_list)
    self.node_list.append(node)
    self.node_to_id[node] = fresh_id
    # A new node starts with no outgoing or incoming edges.
    self.adj_list[fresh_id] = []
    self.reverse_adj_list[fresh_id] = []
    return fresh_id
|
def function[_add_node, parameter[self, node]]:
constant[Add a new node to node_list and give the node an ID.
Args:
node: An instance of Node.
Returns:
node_id: An integer.
]
variable[node_id] assign[=] call[name[len], parameter[name[self].node_list]]
call[name[self].node_to_id][name[node]] assign[=] name[node_id]
call[name[self].node_list.append, parameter[name[node]]]
call[name[self].adj_list][name[node_id]] assign[=] list[[]]
call[name[self].reverse_adj_list][name[node_id]] assign[=] list[[]]
return[name[node_id]]
|
keyword[def] identifier[_add_node] ( identifier[self] , identifier[node] ):
literal[string]
identifier[node_id] = identifier[len] ( identifier[self] . identifier[node_list] )
identifier[self] . identifier[node_to_id] [ identifier[node] ]= identifier[node_id]
identifier[self] . identifier[node_list] . identifier[append] ( identifier[node] )
identifier[self] . identifier[adj_list] [ identifier[node_id] ]=[]
identifier[self] . identifier[reverse_adj_list] [ identifier[node_id] ]=[]
keyword[return] identifier[node_id]
|
def _add_node(self, node):
"""Add a new node to node_list and give the node an ID.
Args:
node: An instance of Node.
Returns:
node_id: An integer.
"""
node_id = len(self.node_list)
self.node_to_id[node] = node_id
self.node_list.append(node)
self.adj_list[node_id] = []
self.reverse_adj_list[node_id] = []
return node_id
|
def token(self):
    """
    Token given by Transbank for the payment initialization url.

    Fetched once via ``fetch_token`` and cached on the instance; per the
    original contract, a PaymentError is raised when an error occurred.
    Every access is recorded through ``logger.payment``.
    """
    cached = self._token
    if not cached:
        cached = self.fetch_token()
        self._token = cached
    logger.payment(self)
    return cached
|
def function[token, parameter[self]]:
constant[
Token given by Transbank for payment initialization url.
Will raise PaymentError when an error ocurred.
]
if <ast.UnaryOp object at 0x7da18f722050> begin[:]
name[self]._token assign[=] call[name[self].fetch_token, parameter[]]
call[name[logger].payment, parameter[name[self]]]
return[name[self]._token]
|
keyword[def] identifier[token] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_token] :
identifier[self] . identifier[_token] = identifier[self] . identifier[fetch_token] ()
identifier[logger] . identifier[payment] ( identifier[self] )
keyword[return] identifier[self] . identifier[_token]
|
def token(self):
"""
Token given by Transbank for payment initialization url.
Will raise PaymentError when an error ocurred.
"""
if not self._token:
self._token = self.fetch_token()
logger.payment(self) # depends on [control=['if'], data=[]]
return self._token
|
def _set_error_counters(self, v, load=False):
    """
    Setter method for error_counters, mapped from YANG variable /mpls_state/rsvp/interfaces/error_counters (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_error_counters is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_error_counters() directly.

    YANG Description: RSVP interface error counters

    :param v: value to assign; coerced into a YANGDynClass container wrapper.
    :param load: conventional pyangbind flag (unused in this body); presumably
        indicates the value comes from a datastore load — TODO confirm against
        other generated setters.
    :raises ValueError: if ``v`` cannot be wrapped as the expected container type.
    """
    # Some YANG-typed values carry their underlying type in ``_utype``;
    # normalize ``v`` through it before wrapping.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Generated pyangbind boilerplate: wrap ``v`` with the schema metadata
        # for the error-counters container (operational data, config false).
        t = YANGDynClass(v,base=error_counters.error_counters, is_container='container', presence=False, yang_name="error-counters", rest_name="error-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-rsvp-interface-error-counters', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)
    except (TypeError, ValueError):
        # Re-raise as ValueError with a structured description of the
        # expected generated container type.
        raise ValueError({
            'error-string': """error_counters must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=error_counters.error_counters, is_container='container', presence=False, yang_name="error-counters", rest_name="error-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-rsvp-interface-error-counters', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)""",
        })
    self.__error_counters = t
    # Propagate the change upward when the parent exposes a _set hook.
    if hasattr(self, '_set'):
        self._set()
|
def function[_set_error_counters, parameter[self, v, load]]:
constant[
Setter method for error_counters, mapped from YANG variable /mpls_state/rsvp/interfaces/error_counters (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_error_counters is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_error_counters() directly.
YANG Description: RSVP interface error counters
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18f00ed10>
name[self].__error_counters assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]]
|
keyword[def] identifier[_set_error_counters] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[error_counters] . identifier[error_counters] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[False] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__error_counters] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] ()
|
def _set_error_counters(self, v, load=False):
"""
Setter method for error_counters, mapped from YANG variable /mpls_state/rsvp/interfaces/error_counters (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_error_counters is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_error_counters() directly.
YANG Description: RSVP interface error counters
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=error_counters.error_counters, is_container='container', presence=False, yang_name='error-counters', rest_name='error-counters', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-rsvp-interface-error-counters', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'error_counters must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=error_counters.error_counters, is_container=\'container\', presence=False, yang_name="error-counters", rest_name="error-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'callpoint\': u\'mpls-rsvp-interface-error-counters\', u\'cli-suppress-show-path\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-mpls-operational\', defining_module=\'brocade-mpls-operational\', yang_type=\'container\', is_config=False)'}) # depends on [control=['except'], data=[]]
self.__error_counters = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]]
|
def connect(self, *names):
    """
    Connect each name in *names* to the one that follows it.

    ``connect('a', 'b', 'c')`` performs ``connectAt('a', 'b')`` then
    ``connectAt('b', 'c')``. At least two names must be supplied
    (IndexError otherwise, as before).
    """
    source, target = names[0], names[1]
    self.connectAt(source, target)
    # Iterate instead of recursing; the chain of connectAt calls is identical.
    for nxt in names[2:]:
        self.connectAt(target, nxt)
        target = nxt
|
def function[connect, parameter[self]]:
constant[
Connects a list of names, one to the next.
]
<ast.Tuple object at 0x7da1b0358af0> assign[=] tuple[[<ast.Subscript object at 0x7da1b035a380>, <ast.Subscript object at 0x7da1b035abf0>, <ast.Subscript object at 0x7da1b0359c60>]]
call[name[self].connectAt, parameter[name[fromName], name[toName]]]
if compare[call[name[len], parameter[name[rest]]] not_equal[!=] constant[0]] begin[:]
call[name[self].connect, parameter[name[toName], <ast.Starred object at 0x7da1b0359270>]]
|
keyword[def] identifier[connect] ( identifier[self] ,* identifier[names] ):
literal[string]
identifier[fromName] , identifier[toName] , identifier[rest] = identifier[names] [ literal[int] ], identifier[names] [ literal[int] ], identifier[names] [ literal[int] :]
identifier[self] . identifier[connectAt] ( identifier[fromName] , identifier[toName] )
keyword[if] identifier[len] ( identifier[rest] )!= literal[int] :
identifier[self] . identifier[connect] ( identifier[toName] ,* identifier[rest] )
|
def connect(self, *names):
"""
Connects a list of names, one to the next.
"""
(fromName, toName, rest) = (names[0], names[1], names[2:])
self.connectAt(fromName, toName)
if len(rest) != 0:
self.connect(toName, *rest) # depends on [control=['if'], data=[]]
|
def set_cloexec(fd):
    """Set the file descriptor `fd` to automatically close on
    :func:`os.execve`. This has no effect on file descriptors inherited across
    :func:`os.fork`, they must be explicitly closed through some other means,
    such as :func:`mitogen.fork.on_fork`.

    :param int fd: descriptor to mark close-on-exec; must be greater than 2
        (the standard streams are never marked).
    :raises ValueError: if `fd` refers to one of the standard streams.
    :raises OSError: if `fd` is not a valid open descriptor.
    """
    # Validate explicitly rather than with ``assert``, which is stripped under
    # ``python -O`` and would silently allow marking stdio descriptors.
    # The check also runs before touching the descriptor at all.
    if fd <= 2:
        raise ValueError('refusing to set close-on-exec on stdio fd %d' % fd)
    flags = fcntl.fcntl(fd, fcntl.F_GETFD)
    fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)
|
def function[set_cloexec, parameter[fd]]:
constant[Set the file descriptor `fd` to automatically close on
:func:`os.execve`. This has no effect on file descriptors inherited across
:func:`os.fork`, they must be explicitly closed through some other means,
such as :func:`mitogen.fork.on_fork`.]
variable[flags] assign[=] call[name[fcntl].fcntl, parameter[name[fd], name[fcntl].F_GETFD]]
assert[compare[name[fd] greater[>] constant[2]]]
call[name[fcntl].fcntl, parameter[name[fd], name[fcntl].F_SETFD, binary_operation[name[flags] <ast.BitOr object at 0x7da2590d6aa0> name[fcntl].FD_CLOEXEC]]]
|
keyword[def] identifier[set_cloexec] ( identifier[fd] ):
literal[string]
identifier[flags] = identifier[fcntl] . identifier[fcntl] ( identifier[fd] , identifier[fcntl] . identifier[F_GETFD] )
keyword[assert] identifier[fd] > literal[int]
identifier[fcntl] . identifier[fcntl] ( identifier[fd] , identifier[fcntl] . identifier[F_SETFD] , identifier[flags] | identifier[fcntl] . identifier[FD_CLOEXEC] )
|
def set_cloexec(fd):
"""Set the file descriptor `fd` to automatically close on
:func:`os.execve`. This has no effect on file descriptors inherited across
:func:`os.fork`, they must be explicitly closed through some other means,
such as :func:`mitogen.fork.on_fork`."""
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
assert fd > 2
fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)
|
def set_word_at_rva(self, rva, word):
    """Write *word* at the file offset corresponding to the given RVA.

    The word is serialized via ``get_data_from_word`` and written with
    ``set_bytes_at_rva``; returns whatever ``set_bytes_at_rva`` returns.
    """
    packed = self.get_data_from_word(word)
    return self.set_bytes_at_rva(rva, packed)
|
def function[set_word_at_rva, parameter[self, rva, word]]:
constant[Set the word value at the file offset corresponding to the given RVA.]
return[call[name[self].set_bytes_at_rva, parameter[name[rva], call[name[self].get_data_from_word, parameter[name[word]]]]]]
|
keyword[def] identifier[set_word_at_rva] ( identifier[self] , identifier[rva] , identifier[word] ):
literal[string]
keyword[return] identifier[self] . identifier[set_bytes_at_rva] ( identifier[rva] , identifier[self] . identifier[get_data_from_word] ( identifier[word] ))
|
def set_word_at_rva(self, rva, word):
"""Set the word value at the file offset corresponding to the given RVA."""
return self.set_bytes_at_rva(rva, self.get_data_from_word(word))
|
def timedelta_to_seconds(delta):
    """Convert a timedelta to seconds, with the microseconds as fraction.

    Largely superseded by the `timedelta.total_seconds()` method introduced
    in Python 2.7, but kept for compatibility.

    >>> from datetime import timedelta
    >>> '%d' % timedelta_to_seconds(timedelta(days=1))
    '86400'
    >>> '%d' % timedelta_to_seconds(timedelta(seconds=1))
    '1'
    >>> '%.6f' % timedelta_to_seconds(timedelta(seconds=1, microseconds=1))
    '1.000001'
    >>> '%.6f' % timedelta_to_seconds(timedelta(microseconds=1))
    '0.000001'
    """
    # Start from the fractional part, staying integer when there is none so
    # whole timedeltas convert without going through float.
    fractional = delta.microseconds * 1e-6 if delta.microseconds else 0
    # Same left-to-right accumulation order as the original implementation.
    return fractional + delta.seconds + delta.days * 60 * 60 * 24
|
def function[timedelta_to_seconds, parameter[delta]]:
constant[Convert a timedelta to seconds with the microseconds as fraction
Note that this method has become largely obsolete with the
`timedelta.total_seconds()` method introduced in Python 2.7.
>>> from datetime import timedelta
>>> '%d' % timedelta_to_seconds(timedelta(days=1))
'86400'
>>> '%d' % timedelta_to_seconds(timedelta(seconds=1))
'1'
>>> '%.6f' % timedelta_to_seconds(timedelta(seconds=1, microseconds=1))
'1.000001'
>>> '%.6f' % timedelta_to_seconds(timedelta(microseconds=1))
'0.000001'
]
if name[delta].microseconds begin[:]
variable[total] assign[=] binary_operation[name[delta].microseconds * constant[1e-06]]
<ast.AugAssign object at 0x7da1b0525b10>
<ast.AugAssign object at 0x7da1b0525660>
return[name[total]]
|
keyword[def] identifier[timedelta_to_seconds] ( identifier[delta] ):
literal[string]
keyword[if] identifier[delta] . identifier[microseconds] :
identifier[total] = identifier[delta] . identifier[microseconds] * literal[int]
keyword[else] :
identifier[total] = literal[int]
identifier[total] += identifier[delta] . identifier[seconds]
identifier[total] += identifier[delta] . identifier[days] * literal[int] * literal[int] * literal[int]
keyword[return] identifier[total]
|
def timedelta_to_seconds(delta):
"""Convert a timedelta to seconds with the microseconds as fraction
Note that this method has become largely obsolete with the
`timedelta.total_seconds()` method introduced in Python 2.7.
>>> from datetime import timedelta
>>> '%d' % timedelta_to_seconds(timedelta(days=1))
'86400'
>>> '%d' % timedelta_to_seconds(timedelta(seconds=1))
'1'
>>> '%.6f' % timedelta_to_seconds(timedelta(seconds=1, microseconds=1))
'1.000001'
>>> '%.6f' % timedelta_to_seconds(timedelta(microseconds=1))
'0.000001'
"""
# Only convert to float if needed
if delta.microseconds:
total = delta.microseconds * 1e-06 # depends on [control=['if'], data=[]]
else:
total = 0
total += delta.seconds
total += delta.days * 60 * 60 * 24
return total
|
def plot(self, x=None, y=None, z=None, what="count(*)", vwhat=None, reduce=["colormap"], f=None,
normalize="normalize", normalize_axis="what",
vmin=None, vmax=None,
shape=256, vshape=32, limits=None, grid=None, colormap="afmhot", # colors=["red", "green", "blue"],
figsize=None, xlabel=None, ylabel=None, aspect="auto", tight_layout=True, interpolation="nearest", show=False,
colorbar=True,
colorbar_label=None,
selection=None, selection_labels=None, title=None,
background_color="white", pre_blend=False, background_alpha=1.,
visual=dict(x="x", y="y", layer="z", fade="selection", row="subspace", column="what"),
smooth_pre=None, smooth_post=None,
wrap=True, wrap_columns=4,
return_extra=False, hardcopy=None):
"""Viz data in a 2d histogram/heatmap.
Declarative plotting of statistical plots using matplotlib, supports subplots, selections, layers.
Instead of passing x and y, pass a list as x argument for multiple panels. Give what a list of options to have multiple
panels. When both are present then will be origanized in a column/row order.
This methods creates a 6 dimensional 'grid', where each dimension can map the a visual dimension.
The grid dimensions are:
* x: shape determined by shape, content by x argument or the first dimension of each space
* y: ,,
* z: related to the z argument
* selection: shape equals length of selection argument
* what: shape equals length of what argument
* space: shape equals length of x argument if multiple values are given
By default, this its shape is (1, 1, 1, 1, shape, shape) (where x is the last dimension)
The visual dimensions are
* x: x coordinate on a plot / image (default maps to grid's x)
* y: y ,, (default maps to grid's y)
* layer: each image in this dimension is blended togeher to one image (default maps to z)
* fade: each image is shown faded after the next image (default mapt to selection)
* row: rows of subplots (default maps to space)
* columns: columns of subplot (default maps to what)
All these mappings can be changes by the visual argument, some examples:
>>> df.plot('x', 'y', what=['mean(x)', 'correlation(vx, vy)'])
Will plot each 'what' as a column.
>>> df.plot('x', 'y', selection=['FeH < -3', '(FeH >= -3) & (FeH < -2)'], visual=dict(column='selection'))
Will plot each selection as a column, instead of a faded on top of each other.
:param x: Expression to bin in the x direction (by default maps to x), or list of pairs, like [['x', 'y'], ['x', 'z']], if multiple pairs are given, this dimension maps to rows by default
:param y: y (by default maps to y)
:param z: Expression to bin in the z direction, followed by a :start,end,shape signature, like 'FeH:-3,1:5' will produce 5 layers between -10 and 10 (by default maps to layer)
:param what: What to plot, count(*) will show a N-d histogram, mean('x'), the mean of the x column, sum('x') the sum, std('x') the standard deviation, correlation('vx', 'vy') the correlation coefficient. Can also be a list of values, like ['count(x)', std('vx')], (by default maps to column)
:param reduce:
:param f: transform values by: 'identity' does nothing 'log' or 'log10' will show the log of the value
:param normalize: normalization function, currently only 'normalize' is supported
:param normalize_axis: which axes to normalize on, None means normalize by the global maximum.
:param vmin: instead of automatic normalization, (using normalize and normalization_axis) scale the data between vmin and vmax to [0, 1]
:param vmax: see vmin
:param shape: shape/size of the n-D histogram grid
:param limits: list of [[xmin, xmax], [ymin, ymax]], or a description such as 'minmax', '99%'
:param grid: if the binning is done before by yourself, you can pass it
:param colormap: matplotlib colormap to use
:param figsize: (x, y) tuple passed to pylab.figure for setting the figure size
:param xlabel:
:param ylabel:
:param aspect:
:param tight_layout: call pylab.tight_layout or not
:param colorbar: plot a colorbar or not
:param interpolation: interpolation for imshow, possible options are: 'nearest', 'bilinear', 'bicubic', see matplotlib for more
:param return_extra:
:return:
"""
import pylab
import matplotlib
n = _parse_n(normalize)
if type(shape) == int:
shape = (shape,) * 2
binby = []
x = _ensure_strings_from_expressions(x)
y = _ensure_strings_from_expressions(y)
for expression in [y, x]:
if expression is not None:
binby = [expression] + binby
fig = pylab.gcf()
if figsize is not None:
fig.set_size_inches(*figsize)
import re
what_units = None
whats = _ensure_list(what)
selections = _ensure_list(selection)
selections = _ensure_strings_from_expressions(selections)
if y is None:
waslist, [x, ] = vaex.utils.listify(x)
else:
waslist, [x, y] = vaex.utils.listify(x, y)
x = list(zip(x, y))
limits = [limits]
# every plot has its own vwhat for now
vwhats = _expand_limits(vwhat, len(x)) # TODO: we're abusing this function..
logger.debug("x: %s", x)
limits, shape = self.limits(x, limits, shape=shape)
shape = shape[0]
logger.debug("limits: %r", limits)
# mapping of a grid axis to a label
labels = {}
shape = _expand_shape(shape, 2)
vshape = _expand_shape(shape, 2)
if z is not None:
match = re.match("(.*):(.*),(.*),(.*)", z)
if match:
groups = match.groups()
import ast
z_expression = groups[0]
logger.debug("found groups: %r", list(groups))
z_limits = [ast.literal_eval(groups[1]), ast.literal_eval(groups[2])]
z_shape = ast.literal_eval(groups[3])
# for pair in x:
x = [[z_expression] + list(k) for k in x]
limits = np.array([[z_limits] + list(k) for k in limits])
shape = (z_shape,) + shape
vshape = (z_shape,) + vshape
logger.debug("x = %r", x)
values = np.linspace(z_limits[0], z_limits[1], num=z_shape + 1)
labels["z"] = list(["%s <= %s < %s" % (v1, z_expression, v2) for v1, v2 in zip(values[:-1], values[1:])])
else:
raise ValueError("Could not understand 'z' argument %r, expected something in form: 'column:-1,10:5'" % facet)
else:
z_shape = 1
# z == 1
if z is None:
total_grid = np.zeros((len(x), len(whats), len(selections), 1) + shape, dtype=float)
total_vgrid = np.zeros((len(x), len(whats), len(selections), 1) + vshape, dtype=float)
else:
total_grid = np.zeros((len(x), len(whats), len(selections)) + shape, dtype=float)
total_vgrid = np.zeros((len(x), len(whats), len(selections)) + vshape, dtype=float)
logger.debug("shape of total grid: %r", total_grid.shape)
axis = dict(plot=0, what=1, selection=2)
xlimits = limits
grid_axes = dict(x=-1, y=-2, z=-3, selection=-4, what=-5, subspace=-6)
visual_axes = dict(x=-1, y=-2, layer=-3, fade=-4, column=-5, row=-6)
# visual_default=dict(x="x", y="y", z="layer", selection="fade", subspace="row", what="column")
# visual: mapping of a plot axis, to a grid axis
visual_default = dict(x="x", y="y", layer="z", fade="selection", row="subspace", column="what")
def invert(x): return dict((v, k) for k, v in x.items())
# visual_default_reverse = invert(visual_default)
# visual_ = visual_default
# visual = dict(visual) # copy for modification
# add entries to avoid mapping multiple times to the same axis
free_visual_axes = list(visual_default.keys())
# visual_reverse = invert(visual)
logger.debug("1: %r %r", visual, free_visual_axes)
for visual_name, grid_name in visual.items():
if visual_name in free_visual_axes:
free_visual_axes.remove(visual_name)
else:
raise ValueError("visual axes %s used multiple times" % visual_name)
logger.debug("2: %r %r", visual, free_visual_axes)
for visual_name, grid_name in visual_default.items():
if visual_name in free_visual_axes and grid_name not in visual.values():
free_visual_axes.remove(visual_name)
visual[visual_name] = grid_name
logger.debug("3: %r %r", visual, free_visual_axes)
for visual_name, grid_name in visual_default.items():
if visual_name not in free_visual_axes and grid_name not in visual.values():
visual[free_visual_axes.pop(0)] = grid_name
logger.debug("4: %r %r", visual, free_visual_axes)
visual_reverse = invert(visual)
# TODO: the meaning of visual and visual_reverse is changed below this line, super confusing
visual, visual_reverse = visual_reverse, visual
# so now, visual: mapping of a grid axis to plot axis
# visual_reverse: mapping of a grid axis to plot axis
move = {}
for grid_name, visual_name in visual.items():
if visual_axes[visual_name] in visual.values():
index = visual.values().find(visual_name)
key = visual.keys()[index]
raise ValueError("trying to map %s to %s while, it is already mapped by %s" % (grid_name, visual_name, key))
move[grid_axes[grid_name]] = visual_axes[visual_name]
# normalize_axis = _ensure_list(normalize_axis)
fs = _expand(f, total_grid.shape[grid_axes[normalize_axis]])
# assert len(vwhat)
# labels["y"] = ylabels
what_labels = []
if grid is None:
grid_of_grids = []
for i, (binby, limits) in enumerate(zip(x, xlimits)):
grid_of_grids.append([])
for j, what in enumerate(whats):
if isinstance(what, vaex.stat.Expression):
grid = what.calculate(self, binby=binby, shape=shape, limits=limits, selection=selections, delay=True)
else:
what = what.strip()
index = what.index("(")
import re
groups = re.match("(.*)\((.*)\)", what).groups()
if groups and len(groups) == 2:
function = groups[0]
arguments = groups[1].strip()
if "," in arguments:
arguments = arguments.split(",")
functions = ["mean", "sum", "std", "var", "correlation", "covar", "min", "max", "median_approx"]
unit_expression = None
if function in ["mean", "sum", "std", "min", "max", "median"]:
unit_expression = arguments
if function in ["var"]:
unit_expression = "(%s) * (%s)" % (arguments, arguments)
if function in ["covar"]:
unit_expression = "(%s) * (%s)" % arguments
if unit_expression:
unit = self.unit(unit_expression)
if unit:
what_units = unit.to_string('latex_inline')
if function in functions:
grid = getattr(self, function)(arguments, binby=binby, limits=limits, shape=shape, selection=selections, delay=True)
elif function == "count":
grid = self.count(arguments, binby, shape=shape, limits=limits, selection=selections, delay=True)
else:
raise ValueError("Could not understand method: %s, expected one of %r'" % (function, functions))
else:
raise ValueError("Could not understand 'what' argument %r, expected something in form: 'count(*)', 'mean(x)'" % what)
if i == 0: # and j == 0:
what_label = str(whats[j])
if what_units:
what_label += " (%s)" % what_units
if fs[j]:
what_label = fs[j] + " " + what_label
what_labels.append(what_label)
grid_of_grids[-1].append(grid)
self.executor.execute()
for i, (binby, limits) in enumerate(zip(x, xlimits)):
for j, what in enumerate(whats):
grid = grid_of_grids[i][j].get()
total_grid[i, j, :, :] = grid[:, None, ...]
labels["what"] = what_labels
else:
dims_left = 6 - len(grid.shape)
total_grid = np.broadcast_to(grid, (1,) * dims_left + grid.shape)
# visual=dict(x="x", y="y", selection="fade", subspace="facet1", what="facet2",)
def _selection_name(name):
if name in [None, False]:
return "selection: all"
elif name in ["default", True]:
return "selection: default"
else:
return "selection: %s" % name
if selection_labels is None:
labels["selection"] = list([_selection_name(k) for k in selections])
else:
labels["selection"] = selection_labels
# visual_grid = np.moveaxis(total_grid, move.keys(), move.values())
# np.moveaxis is in np 1.11 only?, use transpose
axes = [None] * len(move)
for key, value in move.items():
axes[value] = key
visual_grid = np.transpose(total_grid, axes)
logger.debug("grid shape: %r", total_grid.shape)
logger.debug("visual: %r", visual.items())
logger.debug("move: %r", move)
logger.debug("visual grid shape: %r", visual_grid.shape)
xexpressions = []
yexpressions = []
for i, (binby, limits) in enumerate(zip(x, xlimits)):
xexpressions.append(binby[0])
yexpressions.append(binby[1])
if xlabel is None:
xlabels = []
ylabels = []
for i, (binby, limits) in enumerate(zip(x, xlimits)):
if z is not None:
xlabels.append(self.label(binby[1]))
ylabels.append(self.label(binby[2]))
else:
xlabels.append(self.label(binby[0]))
ylabels.append(self.label(binby[1]))
else:
Nl = visual_grid.shape[visual_axes['row']]
xlabels = _expand(xlabel, Nl)
ylabels = _expand(ylabel, Nl)
#labels[visual["x"]] = (xlabels, ylabels)
labels["x"] = xlabels
labels["y"] = ylabels
# grid = total_grid
# print(grid.shape)
# grid = self.reduce(grid, )
axes = []
# cax = pylab.subplot(1,1,1)
background_color = np.array(matplotlib.colors.colorConverter.to_rgb(background_color))
# if grid.shape[axis["selection"]] > 1:# and not facet:
# rgrid = vaex.image.fade(rgrid)
# finite_mask = np.any(finite_mask, axis=0) # do we really need this
# print(rgrid.shape)
# facet_row_axis = axis["what"]
import math
facet_columns = None
facets = visual_grid.shape[visual_axes["row"]] * visual_grid.shape[visual_axes["column"]]
if visual_grid.shape[visual_axes["column"]] == 1 and wrap:
facet_columns = min(wrap_columns, visual_grid.shape[visual_axes["row"]])
wrapped = True
elif visual_grid.shape[visual_axes["row"]] == 1 and wrap:
facet_columns = min(wrap_columns, visual_grid.shape[visual_axes["column"]])
wrapped = True
else:
wrapped = False
facet_columns = visual_grid.shape[visual_axes["column"]]
facet_rows = int(math.ceil(facets / facet_columns))
logger.debug("facet_rows: %r", facet_rows)
logger.debug("facet_columns: %r", facet_columns)
# if visual_grid.shape[visual_axes["row"]] > 1: # and not wrap:
# #facet_row_axis = axis["what"]
# facet_columns = visual_grid.shape[visual_axes["column"]]
# else:
# facet_columns = min(wrap_columns, facets)
# if grid.shape[axis["plot"]] > 1:# and not facet:
# this loop could be done using axis arguments everywhere
# assert len(normalize_axis) == 1, "currently only 1 normalization axis supported"
grid = visual_grid * 1.
fgrid = visual_grid * 1.
ngrid = visual_grid * 1.
# colorgrid = np.zeros(ngrid.shape + (4,), float)
# print "norma", normalize_axis, visual_grid.shape[visual_axes[visual[normalize_axis]]]
vmins = _expand(vmin, visual_grid.shape[visual_axes[visual[normalize_axis]]], type=list)
vmaxs = _expand(vmax, visual_grid.shape[visual_axes[visual[normalize_axis]]], type=list)
# for name in normalize_axis:
visual_grid
if smooth_pre:
grid = vaex.grids.gf(grid, smooth_pre)
if 1:
axis = visual_axes[visual[normalize_axis]]
for i in range(visual_grid.shape[axis]):
item = [slice(None, None, None), ] * len(visual_grid.shape)
item[axis] = i
item = tuple(item)
f = _parse_f(fs[i])
with np.errstate(divide='ignore', invalid='ignore'): # these are fine, we are ok with nan's in vaex
fgrid.__setitem__(item, f(grid.__getitem__(item)))
# print vmins[i], vmaxs[i]
if vmins[i] is not None and vmaxs[i] is not None:
nsubgrid = fgrid.__getitem__(item) * 1
nsubgrid -= vmins[i]
nsubgrid /= (vmaxs[i] - vmins[i])
else:
nsubgrid, vmin, vmax = n(fgrid.__getitem__(item))
vmins[i] = vmin
vmaxs[i] = vmax
# print " ", vmins[i], vmaxs[i]
ngrid.__setitem__(item, nsubgrid)
if 0: # TODO: above should be like the code below, with custom vmin and vmax
grid = visual_grid[i]
f = _parse_f(fs[i])
fgrid = f(grid)
finite_mask = np.isfinite(grid)
finite_mask = np.any(finite_mask, axis=0)
if vmin is not None and vmax is not None:
ngrid = fgrid * 1
ngrid -= vmin
ngrid /= (vmax - vmin)
ngrid = np.clip(ngrid, 0, 1)
else:
ngrid, vmin, vmax = n(fgrid)
# vmin, vmax = np.nanmin(fgrid), np.nanmax(fgrid)
# every 'what', should have its own colorbar, check if what corresponds to
# rows or columns in facets, if so, do a colorbar per row or per column
rows, columns = int(math.ceil(facets / float(facet_columns))), facet_columns
colorbar_location = "individual"
if visual["what"] == "row" and visual_grid.shape[1] == facet_columns:
colorbar_location = "per_row"
if visual["what"] == "column" and visual_grid.shape[0] == facet_rows:
colorbar_location = "per_column"
# values = np.linspace(facet_limits[0], facet_limits[1], facet_count+1)
logger.debug("rows: %r, columns: %r", rows, columns)
import matplotlib.gridspec as gridspec
column_scale = 1
row_scale = 1
row_offset = 0
if facets > 1:
if colorbar_location == "per_row":
column_scale = 4
gs = gridspec.GridSpec(rows, columns * column_scale + 1)
elif colorbar_location == "per_column":
row_offset = 1
row_scale = 4
gs = gridspec.GridSpec(rows * row_scale + 1, columns)
else:
gs = gridspec.GridSpec(rows, columns)
facet_index = 0
fs = _expand(f, len(whats))
colormaps = _expand(colormap, len(whats))
# row
for i in range(visual_grid.shape[0]):
# column
for j in range(visual_grid.shape[1]):
if colorbar and colorbar_location == "per_column" and i == 0:
norm = matplotlib.colors.Normalize(vmins[j], vmaxs[j])
sm = matplotlib.cm.ScalarMappable(norm, colormaps[j])
sm.set_array(1) # make matplotlib happy (strange behavious)
if facets > 1:
ax = pylab.subplot(gs[0, j])
colorbar = fig.colorbar(sm, cax=ax, orientation="horizontal")
else:
colorbar = fig.colorbar(sm)
if "what" in labels:
label = labels["what"][j]
if facets > 1:
colorbar.ax.set_title(label)
else:
colorbar.ax.set_ylabel(colorbar_label or label)
if colorbar and colorbar_location == "per_row" and j == 0:
norm = matplotlib.colors.Normalize(vmins[i], vmaxs[i])
sm = matplotlib.cm.ScalarMappable(norm, colormaps[i])
sm.set_array(1) # make matplotlib happy (strange behavious)
if facets > 1:
ax = pylab.subplot(gs[i, -1])
colorbar = fig.colorbar(sm, cax=ax)
else:
colorbar = fig.colorbar(sm)
label = labels["what"][i]
colorbar.ax.set_ylabel(colorbar_label or label)
rgrid = ngrid[i, j] * 1.
# print rgrid.shape
for k in range(rgrid.shape[0]):
for l in range(rgrid.shape[0]):
if smooth_post is not None:
rgrid[k, l] = vaex.grids.gf(rgrid, smooth_post)
if visual["what"] == "column":
what_index = j
elif visual["what"] == "row":
what_index = i
else:
what_index = 0
if visual[normalize_axis] == "column":
normalize_index = j
elif visual[normalize_axis] == "row":
normalize_index = i
else:
normalize_index = 0
for r in reduce:
r = _parse_reduction(r, colormaps[what_index], [])
rgrid = r(rgrid)
row = facet_index // facet_columns
column = facet_index % facet_columns
if colorbar and colorbar_location == "individual":
# visual_grid.shape[visual_axes[visual[normalize_axis]]]
norm = matplotlib.colors.Normalize(vmins[normalize_index], vmaxs[normalize_index])
sm = matplotlib.cm.ScalarMappable(norm, colormaps[what_index])
sm.set_array(1) # make matplotlib happy (strange behavious)
if facets > 1:
ax = pylab.subplot(gs[row, column])
colorbar = fig.colorbar(sm, ax=ax)
else:
colorbar = fig.colorbar(sm)
label = labels["what"][what_index]
colorbar.ax.set_ylabel(colorbar_label or label)
if facets > 1:
ax = pylab.subplot(gs[row_offset + row * row_scale:row_offset + (row + 1) * row_scale, column * column_scale:(column + 1) * column_scale])
else:
ax = pylab.gca()
axes.append(ax)
logger.debug("rgrid: %r", rgrid.shape)
plot_rgrid = rgrid
assert plot_rgrid.shape[1] == 1, "no layers supported yet"
plot_rgrid = plot_rgrid[:, 0]
if plot_rgrid.shape[0] > 1:
plot_rgrid = vaex.image.fade(plot_rgrid[::-1])
else:
plot_rgrid = plot_rgrid[0]
extend = None
if visual["subspace"] == "row":
subplot_index = i
elif visual["subspace"] == "column":
subplot_index = j
else:
subplot_index = 0
extend = np.array(xlimits[subplot_index][-2:]).flatten()
# extend = np.array(xlimits[i]).flatten()
logger.debug("plot rgrid: %r", plot_rgrid.shape)
plot_rgrid = np.transpose(plot_rgrid, (1, 0, 2))
im = ax.imshow(plot_rgrid, extent=extend.tolist(), origin="lower", aspect=aspect, interpolation=interpolation)
# v1, v2 = values[i], values[i+1]
                def label(index, label, expression):
                    """Return the axis label to draw for *expression*.

                    If *label* is a sequence of per-panel labels, pick the one for
                    the current row; otherwise fall back to the DataFrame's
                    default label for the expression.
                    """
                    # NOTE(review): the ``index`` parameter is never used — the
                    # closure variable ``i`` (current facet row) is indexed
                    # instead. Presumably ``label[index]`` was intended; confirm
                    # before changing, as callers may rely on the row behavior.
                    if label and _issequence(label):
                        return label[i]
                    else:
                        # assumes ``self`` (the enclosing DataFrame) provides a
                        # .label(expression) helper — defined outside this view
                        return self.label(expression)
if visual_reverse["x"] =='x':
labelsx = labels['x']
pylab.xlabel(labelsx[subplot_index])
if visual_reverse["x"] =='x':
labelsy = labels['y']
pylab.ylabel(labelsy[subplot_index])
if visual["z"] in ['row']:
labelsz = labels['z']
ax.set_title(labelsz[i])
if visual["z"] in ['column']:
labelsz = labels['z']
ax.set_title(labelsz[j])
max_labels = 10
# xexpression = xexpressions[i]
# if self.iscategory(xexpression):
# labels = self.category_labels(xexpression)
# step = len(labels) // max_labels
# pylab.xticks(np.arange(len(labels))[::step], labels[::step], size='small')
# yexpression = yexpressions[i]
# if self.iscategory(yexpression):
# labels = self.category_labels(yexpression)
# step = len(labels) // max_labels
# pylab.yticks(np.arange(len(labels))[::step], labels[::step], size='small')
facet_index += 1
if title:
fig.suptitle(title, fontsize="x-large")
if tight_layout:
if title:
pylab.tight_layout(rect=[0, 0.03, 1, 0.95])
else:
pylab.tight_layout()
if hardcopy:
pylab.savefig(hardcopy)
if show:
pylab.show()
if return_extra:
return im, grid, fgrid, ngrid, rgrid
else:
return im
|
def function[plot, parameter[self, x, y, z, what, vwhat, reduce, f, normalize, normalize_axis, vmin, vmax, shape, vshape, limits, grid, colormap, figsize, xlabel, ylabel, aspect, tight_layout, interpolation, show, colorbar, colorbar_label, selection, selection_labels, title, background_color, pre_blend, background_alpha, visual, smooth_pre, smooth_post, wrap, wrap_columns, return_extra, hardcopy]]:
constant[Viz data in a 2d histogram/heatmap.
Declarative plotting of statistical plots using matplotlib, supports subplots, selections, layers.
Instead of passing x and y, pass a list as x argument for multiple panels. Give what a list of options to have multiple
panels. When both are present then will be origanized in a column/row order.
This methods creates a 6 dimensional 'grid', where each dimension can map the a visual dimension.
The grid dimensions are:
* x: shape determined by shape, content by x argument or the first dimension of each space
* y: ,,
* z: related to the z argument
* selection: shape equals length of selection argument
* what: shape equals length of what argument
* space: shape equals length of x argument if multiple values are given
By default, this its shape is (1, 1, 1, 1, shape, shape) (where x is the last dimension)
The visual dimensions are
* x: x coordinate on a plot / image (default maps to grid's x)
* y: y ,, (default maps to grid's y)
* layer: each image in this dimension is blended togeher to one image (default maps to z)
* fade: each image is shown faded after the next image (default mapt to selection)
* row: rows of subplots (default maps to space)
* columns: columns of subplot (default maps to what)
All these mappings can be changes by the visual argument, some examples:
>>> df.plot('x', 'y', what=['mean(x)', 'correlation(vx, vy)'])
Will plot each 'what' as a column.
>>> df.plot('x', 'y', selection=['FeH < -3', '(FeH >= -3) & (FeH < -2)'], visual=dict(column='selection'))
Will plot each selection as a column, instead of a faded on top of each other.
:param x: Expression to bin in the x direction (by default maps to x), or list of pairs, like [['x', 'y'], ['x', 'z']], if multiple pairs are given, this dimension maps to rows by default
:param y: y (by default maps to y)
:param z: Expression to bin in the z direction, followed by a :start,end,shape signature, like 'FeH:-3,1:5' will produce 5 layers between -10 and 10 (by default maps to layer)
:param what: What to plot, count(*) will show a N-d histogram, mean('x'), the mean of the x column, sum('x') the sum, std('x') the standard deviation, correlation('vx', 'vy') the correlation coefficient. Can also be a list of values, like ['count(x)', std('vx')], (by default maps to column)
:param reduce:
:param f: transform values by: 'identity' does nothing 'log' or 'log10' will show the log of the value
:param normalize: normalization function, currently only 'normalize' is supported
:param normalize_axis: which axes to normalize on, None means normalize by the global maximum.
:param vmin: instead of automatic normalization, (using normalize and normalization_axis) scale the data between vmin and vmax to [0, 1]
:param vmax: see vmin
:param shape: shape/size of the n-D histogram grid
:param limits: list of [[xmin, xmax], [ymin, ymax]], or a description such as 'minmax', '99%'
:param grid: if the binning is done before by yourself, you can pass it
:param colormap: matplotlib colormap to use
:param figsize: (x, y) tuple passed to pylab.figure for setting the figure size
:param xlabel:
:param ylabel:
:param aspect:
:param tight_layout: call pylab.tight_layout or not
:param colorbar: plot a colorbar or not
:param interpolation: interpolation for imshow, possible options are: 'nearest', 'bilinear', 'bicubic', see matplotlib for more
:param return_extra:
:return:
]
import module[pylab]
import module[matplotlib]
variable[n] assign[=] call[name[_parse_n], parameter[name[normalize]]]
if compare[call[name[type], parameter[name[shape]]] equal[==] name[int]] begin[:]
variable[shape] assign[=] binary_operation[tuple[[<ast.Name object at 0x7da1b0511930>]] * constant[2]]
variable[binby] assign[=] list[[]]
variable[x] assign[=] call[name[_ensure_strings_from_expressions], parameter[name[x]]]
variable[y] assign[=] call[name[_ensure_strings_from_expressions], parameter[name[y]]]
for taget[name[expression]] in starred[list[[<ast.Name object at 0x7da1b05115d0>, <ast.Name object at 0x7da1b05115a0>]]] begin[:]
if compare[name[expression] is_not constant[None]] begin[:]
variable[binby] assign[=] binary_operation[list[[<ast.Name object at 0x7da1b05113f0>]] + name[binby]]
variable[fig] assign[=] call[name[pylab].gcf, parameter[]]
if compare[name[figsize] is_not constant[None]] begin[:]
call[name[fig].set_size_inches, parameter[<ast.Starred object at 0x7da1b0511120>]]
import module[re]
variable[what_units] assign[=] constant[None]
variable[whats] assign[=] call[name[_ensure_list], parameter[name[what]]]
variable[selections] assign[=] call[name[_ensure_list], parameter[name[selection]]]
variable[selections] assign[=] call[name[_ensure_strings_from_expressions], parameter[name[selections]]]
if compare[name[y] is constant[None]] begin[:]
<ast.Tuple object at 0x7da1b0510bb0> assign[=] call[name[vaex].utils.listify, parameter[name[x]]]
variable[vwhats] assign[=] call[name[_expand_limits], parameter[name[vwhat], call[name[len], parameter[name[x]]]]]
call[name[logger].debug, parameter[constant[x: %s], name[x]]]
<ast.Tuple object at 0x7da1b0510280> assign[=] call[name[self].limits, parameter[name[x], name[limits]]]
variable[shape] assign[=] call[name[shape]][constant[0]]
call[name[logger].debug, parameter[constant[limits: %r], name[limits]]]
variable[labels] assign[=] dictionary[[], []]
variable[shape] assign[=] call[name[_expand_shape], parameter[name[shape], constant[2]]]
variable[vshape] assign[=] call[name[_expand_shape], parameter[name[shape], constant[2]]]
if compare[name[z] is_not constant[None]] begin[:]
variable[match] assign[=] call[name[re].match, parameter[constant[(.*):(.*),(.*),(.*)], name[z]]]
if name[match] begin[:]
variable[groups] assign[=] call[name[match].groups, parameter[]]
import module[ast]
variable[z_expression] assign[=] call[name[groups]][constant[0]]
call[name[logger].debug, parameter[constant[found groups: %r], call[name[list], parameter[name[groups]]]]]
variable[z_limits] assign[=] list[[<ast.Call object at 0x7da20c6c7790>, <ast.Call object at 0x7da20c6c6470>]]
variable[z_shape] assign[=] call[name[ast].literal_eval, parameter[call[name[groups]][constant[3]]]]
variable[x] assign[=] <ast.ListComp object at 0x7da20c6c6bc0>
variable[limits] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da20c6c4f40>]]
variable[shape] assign[=] binary_operation[tuple[[<ast.Name object at 0x7da20c6c7250>]] + name[shape]]
variable[vshape] assign[=] binary_operation[tuple[[<ast.Name object at 0x7da20c6c44c0>]] + name[vshape]]
call[name[logger].debug, parameter[constant[x = %r], name[x]]]
variable[values] assign[=] call[name[np].linspace, parameter[call[name[z_limits]][constant[0]], call[name[z_limits]][constant[1]]]]
call[name[labels]][constant[z]] assign[=] call[name[list], parameter[<ast.ListComp object at 0x7da20c6c5f60>]]
if compare[name[z] is constant[None]] begin[:]
variable[total_grid] assign[=] call[name[np].zeros, parameter[binary_operation[tuple[[<ast.Call object at 0x7da20c6c5a20>, <ast.Call object at 0x7da1b03e1ed0>, <ast.Call object at 0x7da1b03e1e70>, <ast.Constant object at 0x7da1b03e1de0>]] + name[shape]]]]
variable[total_vgrid] assign[=] call[name[np].zeros, parameter[binary_operation[tuple[[<ast.Call object at 0x7da1b03e18a0>, <ast.Call object at 0x7da1b03e12d0>, <ast.Call object at 0x7da1b03e13c0>, <ast.Constant object at 0x7da1b03e1420>]] + name[vshape]]]]
call[name[logger].debug, parameter[constant[shape of total grid: %r], name[total_grid].shape]]
variable[axis] assign[=] call[name[dict], parameter[]]
variable[xlimits] assign[=] name[limits]
variable[grid_axes] assign[=] call[name[dict], parameter[]]
variable[visual_axes] assign[=] call[name[dict], parameter[]]
variable[visual_default] assign[=] call[name[dict], parameter[]]
def function[invert, parameter[x]]:
return[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b0528070>]]]
variable[free_visual_axes] assign[=] call[name[list], parameter[call[name[visual_default].keys, parameter[]]]]
call[name[logger].debug, parameter[constant[1: %r %r], name[visual], name[free_visual_axes]]]
for taget[tuple[[<ast.Name object at 0x7da1b052b4f0>, <ast.Name object at 0x7da1b05296f0>]]] in starred[call[name[visual].items, parameter[]]] begin[:]
if compare[name[visual_name] in name[free_visual_axes]] begin[:]
call[name[free_visual_axes].remove, parameter[name[visual_name]]]
call[name[logger].debug, parameter[constant[2: %r %r], name[visual], name[free_visual_axes]]]
for taget[tuple[[<ast.Name object at 0x7da1b052b850>, <ast.Name object at 0x7da1b0529600>]]] in starred[call[name[visual_default].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b05284f0> begin[:]
call[name[free_visual_axes].remove, parameter[name[visual_name]]]
call[name[visual]][name[visual_name]] assign[=] name[grid_name]
call[name[logger].debug, parameter[constant[3: %r %r], name[visual], name[free_visual_axes]]]
for taget[tuple[[<ast.Name object at 0x7da1b052bb20>, <ast.Name object at 0x7da1b0528d90>]]] in starred[call[name[visual_default].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b052a9e0> begin[:]
call[name[visual]][call[name[free_visual_axes].pop, parameter[constant[0]]]] assign[=] name[grid_name]
call[name[logger].debug, parameter[constant[4: %r %r], name[visual], name[free_visual_axes]]]
variable[visual_reverse] assign[=] call[name[invert], parameter[name[visual]]]
<ast.Tuple object at 0x7da1b0529630> assign[=] tuple[[<ast.Name object at 0x7da1b0529480>, <ast.Name object at 0x7da1b0528040>]]
variable[move] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b052b1f0>, <ast.Name object at 0x7da1b0529ab0>]]] in starred[call[name[visual].items, parameter[]]] begin[:]
if compare[call[name[visual_axes]][name[visual_name]] in call[name[visual].values, parameter[]]] begin[:]
variable[index] assign[=] call[call[name[visual].values, parameter[]].find, parameter[name[visual_name]]]
variable[key] assign[=] call[call[name[visual].keys, parameter[]]][name[index]]
<ast.Raise object at 0x7da1b052ad10>
call[name[move]][call[name[grid_axes]][name[grid_name]]] assign[=] call[name[visual_axes]][name[visual_name]]
variable[fs] assign[=] call[name[_expand], parameter[name[f], call[name[total_grid].shape][call[name[grid_axes]][name[normalize_axis]]]]]
variable[what_labels] assign[=] list[[]]
if compare[name[grid] is constant[None]] begin[:]
variable[grid_of_grids] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b052a4d0>, <ast.Tuple object at 0x7da1b052b310>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[x], name[xlimits]]]]]] begin[:]
call[name[grid_of_grids].append, parameter[list[[]]]]
for taget[tuple[[<ast.Name object at 0x7da1b0528ee0>, <ast.Name object at 0x7da1b052ae30>]]] in starred[call[name[enumerate], parameter[name[whats]]]] begin[:]
if call[name[isinstance], parameter[name[what], name[vaex].stat.Expression]] begin[:]
variable[grid] assign[=] call[name[what].calculate, parameter[name[self]]]
if compare[name[i] equal[==] constant[0]] begin[:]
variable[what_label] assign[=] call[name[str], parameter[call[name[whats]][name[j]]]]
if name[what_units] begin[:]
<ast.AugAssign object at 0x7da20c6a8f70>
if call[name[fs]][name[j]] begin[:]
variable[what_label] assign[=] binary_operation[binary_operation[call[name[fs]][name[j]] + constant[ ]] + name[what_label]]
call[name[what_labels].append, parameter[name[what_label]]]
call[call[name[grid_of_grids]][<ast.UnaryOp object at 0x7da20c6ab970>].append, parameter[name[grid]]]
call[name[self].executor.execute, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20c6a9690>, <ast.Tuple object at 0x7da20c6a84f0>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[x], name[xlimits]]]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da20c6a81c0>, <ast.Name object at 0x7da20c6ab7c0>]]] in starred[call[name[enumerate], parameter[name[whats]]]] begin[:]
variable[grid] assign[=] call[call[call[name[grid_of_grids]][name[i]]][name[j]].get, parameter[]]
call[name[total_grid]][tuple[[<ast.Name object at 0x7da20c6abdc0>, <ast.Name object at 0x7da20c6a8280>, <ast.Slice object at 0x7da20c6ab280>, <ast.Slice object at 0x7da20c6a9210>]]] assign[=] call[name[grid]][tuple[[<ast.Slice object at 0x7da20c6a9a20>, <ast.Constant object at 0x7da20c6a8df0>, <ast.Constant object at 0x7da20c6a82e0>]]]
call[name[labels]][constant[what]] assign[=] name[what_labels]
def function[_selection_name, parameter[name]]:
if compare[name[name] in list[[<ast.Constant object at 0x7da20c6a9240>, <ast.Constant object at 0x7da20c6a8520>]]] begin[:]
return[constant[selection: all]]
if compare[name[selection_labels] is constant[None]] begin[:]
call[name[labels]][constant[selection]] assign[=] call[name[list], parameter[<ast.ListComp object at 0x7da20c6aa500>]]
variable[axes] assign[=] binary_operation[list[[<ast.Constant object at 0x7da18ede7130>]] * call[name[len], parameter[name[move]]]]
for taget[tuple[[<ast.Name object at 0x7da18ede5240>, <ast.Name object at 0x7da18ede7dc0>]]] in starred[call[name[move].items, parameter[]]] begin[:]
call[name[axes]][name[value]] assign[=] name[key]
variable[visual_grid] assign[=] call[name[np].transpose, parameter[name[total_grid], name[axes]]]
call[name[logger].debug, parameter[constant[grid shape: %r], name[total_grid].shape]]
call[name[logger].debug, parameter[constant[visual: %r], call[name[visual].items, parameter[]]]]
call[name[logger].debug, parameter[constant[move: %r], name[move]]]
call[name[logger].debug, parameter[constant[visual grid shape: %r], name[visual_grid].shape]]
variable[xexpressions] assign[=] list[[]]
variable[yexpressions] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18ede71f0>, <ast.Tuple object at 0x7da18ede71c0>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[x], name[xlimits]]]]]] begin[:]
call[name[xexpressions].append, parameter[call[name[binby]][constant[0]]]]
call[name[yexpressions].append, parameter[call[name[binby]][constant[1]]]]
if compare[name[xlabel] is constant[None]] begin[:]
variable[xlabels] assign[=] list[[]]
variable[ylabels] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18ede48e0>, <ast.Tuple object at 0x7da18ede6e60>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[x], name[xlimits]]]]]] begin[:]
if compare[name[z] is_not constant[None]] begin[:]
call[name[xlabels].append, parameter[call[name[self].label, parameter[call[name[binby]][constant[1]]]]]]
call[name[ylabels].append, parameter[call[name[self].label, parameter[call[name[binby]][constant[2]]]]]]
call[name[labels]][constant[x]] assign[=] name[xlabels]
call[name[labels]][constant[y]] assign[=] name[ylabels]
variable[axes] assign[=] list[[]]
variable[background_color] assign[=] call[name[np].array, parameter[call[name[matplotlib].colors.colorConverter.to_rgb, parameter[name[background_color]]]]]
import module[math]
variable[facet_columns] assign[=] constant[None]
variable[facets] assign[=] binary_operation[call[name[visual_grid].shape][call[name[visual_axes]][constant[row]]] * call[name[visual_grid].shape][call[name[visual_axes]][constant[column]]]]
if <ast.BoolOp object at 0x7da18ede5ff0> begin[:]
variable[facet_columns] assign[=] call[name[min], parameter[name[wrap_columns], call[name[visual_grid].shape][call[name[visual_axes]][constant[row]]]]]
variable[wrapped] assign[=] constant[True]
variable[facet_rows] assign[=] call[name[int], parameter[call[name[math].ceil, parameter[binary_operation[name[facets] / name[facet_columns]]]]]]
call[name[logger].debug, parameter[constant[facet_rows: %r], name[facet_rows]]]
call[name[logger].debug, parameter[constant[facet_columns: %r], name[facet_columns]]]
variable[grid] assign[=] binary_operation[name[visual_grid] * constant[1.0]]
variable[fgrid] assign[=] binary_operation[name[visual_grid] * constant[1.0]]
variable[ngrid] assign[=] binary_operation[name[visual_grid] * constant[1.0]]
variable[vmins] assign[=] call[name[_expand], parameter[name[vmin], call[name[visual_grid].shape][call[name[visual_axes]][call[name[visual]][name[normalize_axis]]]]]]
variable[vmaxs] assign[=] call[name[_expand], parameter[name[vmax], call[name[visual_grid].shape][call[name[visual_axes]][call[name[visual]][name[normalize_axis]]]]]]
name[visual_grid]
if name[smooth_pre] begin[:]
variable[grid] assign[=] call[name[vaex].grids.gf, parameter[name[grid], name[smooth_pre]]]
if constant[1] begin[:]
variable[axis] assign[=] call[name[visual_axes]][call[name[visual]][name[normalize_axis]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[visual_grid].shape][name[axis]]]]] begin[:]
variable[item] assign[=] binary_operation[list[[<ast.Call object at 0x7da18f00c580>]] * call[name[len], parameter[name[visual_grid].shape]]]
call[name[item]][name[axis]] assign[=] name[i]
variable[item] assign[=] call[name[tuple], parameter[name[item]]]
variable[f] assign[=] call[name[_parse_f], parameter[call[name[fs]][name[i]]]]
with call[name[np].errstate, parameter[]] begin[:]
call[name[fgrid].__setitem__, parameter[name[item], call[name[f], parameter[call[name[grid].__getitem__, parameter[name[item]]]]]]]
if <ast.BoolOp object at 0x7da2041dbb20> begin[:]
variable[nsubgrid] assign[=] binary_operation[call[name[fgrid].__getitem__, parameter[name[item]]] * constant[1]]
<ast.AugAssign object at 0x7da2041d90c0>
<ast.AugAssign object at 0x7da2041dbeb0>
call[name[ngrid].__setitem__, parameter[name[item], name[nsubgrid]]]
if constant[0] begin[:]
variable[grid] assign[=] call[name[visual_grid]][name[i]]
variable[f] assign[=] call[name[_parse_f], parameter[call[name[fs]][name[i]]]]
variable[fgrid] assign[=] call[name[f], parameter[name[grid]]]
variable[finite_mask] assign[=] call[name[np].isfinite, parameter[name[grid]]]
variable[finite_mask] assign[=] call[name[np].any, parameter[name[finite_mask]]]
if <ast.BoolOp object at 0x7da18f722770> begin[:]
variable[ngrid] assign[=] binary_operation[name[fgrid] * constant[1]]
<ast.AugAssign object at 0x7da18f722ad0>
<ast.AugAssign object at 0x7da18f721990>
variable[ngrid] assign[=] call[name[np].clip, parameter[name[ngrid], constant[0], constant[1]]]
<ast.Tuple object at 0x7da18f721e70> assign[=] tuple[[<ast.Call object at 0x7da18f720d30>, <ast.Name object at 0x7da18f722a10>]]
variable[colorbar_location] assign[=] constant[individual]
if <ast.BoolOp object at 0x7da18f721de0> begin[:]
variable[colorbar_location] assign[=] constant[per_row]
if <ast.BoolOp object at 0x7da18f7218d0> begin[:]
variable[colorbar_location] assign[=] constant[per_column]
call[name[logger].debug, parameter[constant[rows: %r, columns: %r], name[rows], name[columns]]]
import module[matplotlib.gridspec] as alias[gridspec]
variable[column_scale] assign[=] constant[1]
variable[row_scale] assign[=] constant[1]
variable[row_offset] assign[=] constant[0]
if compare[name[facets] greater[>] constant[1]] begin[:]
if compare[name[colorbar_location] equal[==] constant[per_row]] begin[:]
variable[column_scale] assign[=] constant[4]
variable[gs] assign[=] call[name[gridspec].GridSpec, parameter[name[rows], binary_operation[binary_operation[name[columns] * name[column_scale]] + constant[1]]]]
variable[facet_index] assign[=] constant[0]
variable[fs] assign[=] call[name[_expand], parameter[name[f], call[name[len], parameter[name[whats]]]]]
variable[colormaps] assign[=] call[name[_expand], parameter[name[colormap], call[name[len], parameter[name[whats]]]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[visual_grid].shape][constant[0]]]]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[call[name[visual_grid].shape][constant[1]]]]] begin[:]
if <ast.BoolOp object at 0x7da20c6e7c40> begin[:]
variable[norm] assign[=] call[name[matplotlib].colors.Normalize, parameter[call[name[vmins]][name[j]], call[name[vmaxs]][name[j]]]]
variable[sm] assign[=] call[name[matplotlib].cm.ScalarMappable, parameter[name[norm], call[name[colormaps]][name[j]]]]
call[name[sm].set_array, parameter[constant[1]]]
if compare[name[facets] greater[>] constant[1]] begin[:]
variable[ax] assign[=] call[name[pylab].subplot, parameter[call[name[gs]][tuple[[<ast.Constant object at 0x7da2046225f0>, <ast.Name object at 0x7da204623b50>]]]]]
variable[colorbar] assign[=] call[name[fig].colorbar, parameter[name[sm]]]
if compare[constant[what] in name[labels]] begin[:]
variable[label] assign[=] call[call[name[labels]][constant[what]]][name[j]]
if compare[name[facets] greater[>] constant[1]] begin[:]
call[name[colorbar].ax.set_title, parameter[name[label]]]
if <ast.BoolOp object at 0x7da207f03070> begin[:]
variable[norm] assign[=] call[name[matplotlib].colors.Normalize, parameter[call[name[vmins]][name[i]], call[name[vmaxs]][name[i]]]]
variable[sm] assign[=] call[name[matplotlib].cm.ScalarMappable, parameter[name[norm], call[name[colormaps]][name[i]]]]
call[name[sm].set_array, parameter[constant[1]]]
if compare[name[facets] greater[>] constant[1]] begin[:]
variable[ax] assign[=] call[name[pylab].subplot, parameter[call[name[gs]][tuple[[<ast.Name object at 0x7da207f03790>, <ast.UnaryOp object at 0x7da207f01f90>]]]]]
variable[colorbar] assign[=] call[name[fig].colorbar, parameter[name[sm]]]
variable[label] assign[=] call[call[name[labels]][constant[what]]][name[i]]
call[name[colorbar].ax.set_ylabel, parameter[<ast.BoolOp object at 0x7da207f00220>]]
variable[rgrid] assign[=] binary_operation[call[name[ngrid]][tuple[[<ast.Name object at 0x7da207f00f70>, <ast.Name object at 0x7da207f03c70>]]] * constant[1.0]]
for taget[name[k]] in starred[call[name[range], parameter[call[name[rgrid].shape][constant[0]]]]] begin[:]
for taget[name[l]] in starred[call[name[range], parameter[call[name[rgrid].shape][constant[0]]]]] begin[:]
if compare[name[smooth_post] is_not constant[None]] begin[:]
call[name[rgrid]][tuple[[<ast.Name object at 0x7da18dc06a70>, <ast.Name object at 0x7da18dc05c30>]]] assign[=] call[name[vaex].grids.gf, parameter[name[rgrid], name[smooth_post]]]
if compare[call[name[visual]][constant[what]] equal[==] constant[column]] begin[:]
variable[what_index] assign[=] name[j]
if compare[call[name[visual]][name[normalize_axis]] equal[==] constant[column]] begin[:]
variable[normalize_index] assign[=] name[j]
for taget[name[r]] in starred[name[reduce]] begin[:]
variable[r] assign[=] call[name[_parse_reduction], parameter[name[r], call[name[colormaps]][name[what_index]], list[[]]]]
variable[rgrid] assign[=] call[name[r], parameter[name[rgrid]]]
variable[row] assign[=] binary_operation[name[facet_index] <ast.FloorDiv object at 0x7da2590d6bc0> name[facet_columns]]
variable[column] assign[=] binary_operation[name[facet_index] <ast.Mod object at 0x7da2590d6920> name[facet_columns]]
if <ast.BoolOp object at 0x7da18bc727a0> begin[:]
variable[norm] assign[=] call[name[matplotlib].colors.Normalize, parameter[call[name[vmins]][name[normalize_index]], call[name[vmaxs]][name[normalize_index]]]]
variable[sm] assign[=] call[name[matplotlib].cm.ScalarMappable, parameter[name[norm], call[name[colormaps]][name[what_index]]]]
call[name[sm].set_array, parameter[constant[1]]]
if compare[name[facets] greater[>] constant[1]] begin[:]
variable[ax] assign[=] call[name[pylab].subplot, parameter[call[name[gs]][tuple[[<ast.Name object at 0x7da18bc72cb0>, <ast.Name object at 0x7da18bc73730>]]]]]
variable[colorbar] assign[=] call[name[fig].colorbar, parameter[name[sm]]]
variable[label] assign[=] call[call[name[labels]][constant[what]]][name[what_index]]
call[name[colorbar].ax.set_ylabel, parameter[<ast.BoolOp object at 0x7da18bc73e80>]]
if compare[name[facets] greater[>] constant[1]] begin[:]
variable[ax] assign[=] call[name[pylab].subplot, parameter[call[name[gs]][tuple[[<ast.Slice object at 0x7da18bc728c0>, <ast.Slice object at 0x7da18bc737f0>]]]]]
call[name[axes].append, parameter[name[ax]]]
call[name[logger].debug, parameter[constant[rgrid: %r], name[rgrid].shape]]
variable[plot_rgrid] assign[=] name[rgrid]
assert[compare[call[name[plot_rgrid].shape][constant[1]] equal[==] constant[1]]]
variable[plot_rgrid] assign[=] call[name[plot_rgrid]][tuple[[<ast.Slice object at 0x7da18bc733a0>, <ast.Constant object at 0x7da18bc72a70>]]]
if compare[call[name[plot_rgrid].shape][constant[0]] greater[>] constant[1]] begin[:]
variable[plot_rgrid] assign[=] call[name[vaex].image.fade, parameter[call[name[plot_rgrid]][<ast.Slice object at 0x7da18bc73be0>]]]
variable[extend] assign[=] constant[None]
if compare[call[name[visual]][constant[subspace]] equal[==] constant[row]] begin[:]
variable[subplot_index] assign[=] name[i]
variable[extend] assign[=] call[call[name[np].array, parameter[call[call[name[xlimits]][name[subplot_index]]][<ast.Slice object at 0x7da18bc714b0>]]].flatten, parameter[]]
call[name[logger].debug, parameter[constant[plot rgrid: %r], name[plot_rgrid].shape]]
variable[plot_rgrid] assign[=] call[name[np].transpose, parameter[name[plot_rgrid], tuple[[<ast.Constant object at 0x7da18bc70670>, <ast.Constant object at 0x7da18bc72fb0>, <ast.Constant object at 0x7da18bc706a0>]]]]
variable[im] assign[=] call[name[ax].imshow, parameter[name[plot_rgrid]]]
def function[label, parameter[index, label, expression]]:
if <ast.BoolOp object at 0x7da18bc70550> begin[:]
return[call[name[label]][name[i]]]
if compare[call[name[visual_reverse]][constant[x]] equal[==] constant[x]] begin[:]
variable[labelsx] assign[=] call[name[labels]][constant[x]]
call[name[pylab].xlabel, parameter[call[name[labelsx]][name[subplot_index]]]]
if compare[call[name[visual_reverse]][constant[x]] equal[==] constant[x]] begin[:]
variable[labelsy] assign[=] call[name[labels]][constant[y]]
call[name[pylab].ylabel, parameter[call[name[labelsy]][name[subplot_index]]]]
if compare[call[name[visual]][constant[z]] in list[[<ast.Constant object at 0x7da18bc719c0>]]] begin[:]
variable[labelsz] assign[=] call[name[labels]][constant[z]]
call[name[ax].set_title, parameter[call[name[labelsz]][name[i]]]]
if compare[call[name[visual]][constant[z]] in list[[<ast.Constant object at 0x7da18f09d7b0>]]] begin[:]
variable[labelsz] assign[=] call[name[labels]][constant[z]]
call[name[ax].set_title, parameter[call[name[labelsz]][name[j]]]]
variable[max_labels] assign[=] constant[10]
<ast.AugAssign object at 0x7da18f09dc90>
if name[title] begin[:]
call[name[fig].suptitle, parameter[name[title]]]
if name[tight_layout] begin[:]
if name[title] begin[:]
call[name[pylab].tight_layout, parameter[]]
if name[hardcopy] begin[:]
call[name[pylab].savefig, parameter[name[hardcopy]]]
if name[show] begin[:]
call[name[pylab].show, parameter[]]
if name[return_extra] begin[:]
return[tuple[[<ast.Name object at 0x7da18f09e320>, <ast.Name object at 0x7da18f09f250>, <ast.Name object at 0x7da18f09d720>, <ast.Name object at 0x7da18f09e230>, <ast.Name object at 0x7da18f09c160>]]]
|
keyword[def] identifier[plot] ( identifier[self] , identifier[x] = keyword[None] , identifier[y] = keyword[None] , identifier[z] = keyword[None] , identifier[what] = literal[string] , identifier[vwhat] = keyword[None] , identifier[reduce] =[ literal[string] ], identifier[f] = keyword[None] ,
identifier[normalize] = literal[string] , identifier[normalize_axis] = literal[string] ,
identifier[vmin] = keyword[None] , identifier[vmax] = keyword[None] ,
identifier[shape] = literal[int] , identifier[vshape] = literal[int] , identifier[limits] = keyword[None] , identifier[grid] = keyword[None] , identifier[colormap] = literal[string] ,
identifier[figsize] = keyword[None] , identifier[xlabel] = keyword[None] , identifier[ylabel] = keyword[None] , identifier[aspect] = literal[string] , identifier[tight_layout] = keyword[True] , identifier[interpolation] = literal[string] , identifier[show] = keyword[False] ,
identifier[colorbar] = keyword[True] ,
identifier[colorbar_label] = keyword[None] ,
identifier[selection] = keyword[None] , identifier[selection_labels] = keyword[None] , identifier[title] = keyword[None] ,
identifier[background_color] = literal[string] , identifier[pre_blend] = keyword[False] , identifier[background_alpha] = literal[int] ,
identifier[visual] = identifier[dict] ( identifier[x] = literal[string] , identifier[y] = literal[string] , identifier[layer] = literal[string] , identifier[fade] = literal[string] , identifier[row] = literal[string] , identifier[column] = literal[string] ),
identifier[smooth_pre] = keyword[None] , identifier[smooth_post] = keyword[None] ,
identifier[wrap] = keyword[True] , identifier[wrap_columns] = literal[int] ,
identifier[return_extra] = keyword[False] , identifier[hardcopy] = keyword[None] ):
literal[string]
keyword[import] identifier[pylab]
keyword[import] identifier[matplotlib]
identifier[n] = identifier[_parse_n] ( identifier[normalize] )
keyword[if] identifier[type] ( identifier[shape] )== identifier[int] :
identifier[shape] =( identifier[shape] ,)* literal[int]
identifier[binby] =[]
identifier[x] = identifier[_ensure_strings_from_expressions] ( identifier[x] )
identifier[y] = identifier[_ensure_strings_from_expressions] ( identifier[y] )
keyword[for] identifier[expression] keyword[in] [ identifier[y] , identifier[x] ]:
keyword[if] identifier[expression] keyword[is] keyword[not] keyword[None] :
identifier[binby] =[ identifier[expression] ]+ identifier[binby]
identifier[fig] = identifier[pylab] . identifier[gcf] ()
keyword[if] identifier[figsize] keyword[is] keyword[not] keyword[None] :
identifier[fig] . identifier[set_size_inches] (* identifier[figsize] )
keyword[import] identifier[re]
identifier[what_units] = keyword[None]
identifier[whats] = identifier[_ensure_list] ( identifier[what] )
identifier[selections] = identifier[_ensure_list] ( identifier[selection] )
identifier[selections] = identifier[_ensure_strings_from_expressions] ( identifier[selections] )
keyword[if] identifier[y] keyword[is] keyword[None] :
identifier[waslist] ,[ identifier[x] ,]= identifier[vaex] . identifier[utils] . identifier[listify] ( identifier[x] )
keyword[else] :
identifier[waslist] ,[ identifier[x] , identifier[y] ]= identifier[vaex] . identifier[utils] . identifier[listify] ( identifier[x] , identifier[y] )
identifier[x] = identifier[list] ( identifier[zip] ( identifier[x] , identifier[y] ))
identifier[limits] =[ identifier[limits] ]
identifier[vwhats] = identifier[_expand_limits] ( identifier[vwhat] , identifier[len] ( identifier[x] ))
identifier[logger] . identifier[debug] ( literal[string] , identifier[x] )
identifier[limits] , identifier[shape] = identifier[self] . identifier[limits] ( identifier[x] , identifier[limits] , identifier[shape] = identifier[shape] )
identifier[shape] = identifier[shape] [ literal[int] ]
identifier[logger] . identifier[debug] ( literal[string] , identifier[limits] )
identifier[labels] ={}
identifier[shape] = identifier[_expand_shape] ( identifier[shape] , literal[int] )
identifier[vshape] = identifier[_expand_shape] ( identifier[shape] , literal[int] )
keyword[if] identifier[z] keyword[is] keyword[not] keyword[None] :
identifier[match] = identifier[re] . identifier[match] ( literal[string] , identifier[z] )
keyword[if] identifier[match] :
identifier[groups] = identifier[match] . identifier[groups] ()
keyword[import] identifier[ast]
identifier[z_expression] = identifier[groups] [ literal[int] ]
identifier[logger] . identifier[debug] ( literal[string] , identifier[list] ( identifier[groups] ))
identifier[z_limits] =[ identifier[ast] . identifier[literal_eval] ( identifier[groups] [ literal[int] ]), identifier[ast] . identifier[literal_eval] ( identifier[groups] [ literal[int] ])]
identifier[z_shape] = identifier[ast] . identifier[literal_eval] ( identifier[groups] [ literal[int] ])
identifier[x] =[[ identifier[z_expression] ]+ identifier[list] ( identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[x] ]
identifier[limits] = identifier[np] . identifier[array] ([[ identifier[z_limits] ]+ identifier[list] ( identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[limits] ])
identifier[shape] =( identifier[z_shape] ,)+ identifier[shape]
identifier[vshape] =( identifier[z_shape] ,)+ identifier[vshape]
identifier[logger] . identifier[debug] ( literal[string] , identifier[x] )
identifier[values] = identifier[np] . identifier[linspace] ( identifier[z_limits] [ literal[int] ], identifier[z_limits] [ literal[int] ], identifier[num] = identifier[z_shape] + literal[int] )
identifier[labels] [ literal[string] ]= identifier[list] ([ literal[string] %( identifier[v1] , identifier[z_expression] , identifier[v2] ) keyword[for] identifier[v1] , identifier[v2] keyword[in] identifier[zip] ( identifier[values] [:- literal[int] ], identifier[values] [ literal[int] :])])
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[facet] )
keyword[else] :
identifier[z_shape] = literal[int]
keyword[if] identifier[z] keyword[is] keyword[None] :
identifier[total_grid] = identifier[np] . identifier[zeros] (( identifier[len] ( identifier[x] ), identifier[len] ( identifier[whats] ), identifier[len] ( identifier[selections] ), literal[int] )+ identifier[shape] , identifier[dtype] = identifier[float] )
identifier[total_vgrid] = identifier[np] . identifier[zeros] (( identifier[len] ( identifier[x] ), identifier[len] ( identifier[whats] ), identifier[len] ( identifier[selections] ), literal[int] )+ identifier[vshape] , identifier[dtype] = identifier[float] )
keyword[else] :
identifier[total_grid] = identifier[np] . identifier[zeros] (( identifier[len] ( identifier[x] ), identifier[len] ( identifier[whats] ), identifier[len] ( identifier[selections] ))+ identifier[shape] , identifier[dtype] = identifier[float] )
identifier[total_vgrid] = identifier[np] . identifier[zeros] (( identifier[len] ( identifier[x] ), identifier[len] ( identifier[whats] ), identifier[len] ( identifier[selections] ))+ identifier[vshape] , identifier[dtype] = identifier[float] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[total_grid] . identifier[shape] )
identifier[axis] = identifier[dict] ( identifier[plot] = literal[int] , identifier[what] = literal[int] , identifier[selection] = literal[int] )
identifier[xlimits] = identifier[limits]
identifier[grid_axes] = identifier[dict] ( identifier[x] =- literal[int] , identifier[y] =- literal[int] , identifier[z] =- literal[int] , identifier[selection] =- literal[int] , identifier[what] =- literal[int] , identifier[subspace] =- literal[int] )
identifier[visual_axes] = identifier[dict] ( identifier[x] =- literal[int] , identifier[y] =- literal[int] , identifier[layer] =- literal[int] , identifier[fade] =- literal[int] , identifier[column] =- literal[int] , identifier[row] =- literal[int] )
identifier[visual_default] = identifier[dict] ( identifier[x] = literal[string] , identifier[y] = literal[string] , identifier[layer] = literal[string] , identifier[fade] = literal[string] , identifier[row] = literal[string] , identifier[column] = literal[string] )
keyword[def] identifier[invert] ( identifier[x] ): keyword[return] identifier[dict] (( identifier[v] , identifier[k] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[x] . identifier[items] ())
identifier[free_visual_axes] = identifier[list] ( identifier[visual_default] . identifier[keys] ())
identifier[logger] . identifier[debug] ( literal[string] , identifier[visual] , identifier[free_visual_axes] )
keyword[for] identifier[visual_name] , identifier[grid_name] keyword[in] identifier[visual] . identifier[items] ():
keyword[if] identifier[visual_name] keyword[in] identifier[free_visual_axes] :
identifier[free_visual_axes] . identifier[remove] ( identifier[visual_name] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[visual_name] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[visual] , identifier[free_visual_axes] )
keyword[for] identifier[visual_name] , identifier[grid_name] keyword[in] identifier[visual_default] . identifier[items] ():
keyword[if] identifier[visual_name] keyword[in] identifier[free_visual_axes] keyword[and] identifier[grid_name] keyword[not] keyword[in] identifier[visual] . identifier[values] ():
identifier[free_visual_axes] . identifier[remove] ( identifier[visual_name] )
identifier[visual] [ identifier[visual_name] ]= identifier[grid_name]
identifier[logger] . identifier[debug] ( literal[string] , identifier[visual] , identifier[free_visual_axes] )
keyword[for] identifier[visual_name] , identifier[grid_name] keyword[in] identifier[visual_default] . identifier[items] ():
keyword[if] identifier[visual_name] keyword[not] keyword[in] identifier[free_visual_axes] keyword[and] identifier[grid_name] keyword[not] keyword[in] identifier[visual] . identifier[values] ():
identifier[visual] [ identifier[free_visual_axes] . identifier[pop] ( literal[int] )]= identifier[grid_name]
identifier[logger] . identifier[debug] ( literal[string] , identifier[visual] , identifier[free_visual_axes] )
identifier[visual_reverse] = identifier[invert] ( identifier[visual] )
identifier[visual] , identifier[visual_reverse] = identifier[visual_reverse] , identifier[visual]
identifier[move] ={}
keyword[for] identifier[grid_name] , identifier[visual_name] keyword[in] identifier[visual] . identifier[items] ():
keyword[if] identifier[visual_axes] [ identifier[visual_name] ] keyword[in] identifier[visual] . identifier[values] ():
identifier[index] = identifier[visual] . identifier[values] (). identifier[find] ( identifier[visual_name] )
identifier[key] = identifier[visual] . identifier[keys] ()[ identifier[index] ]
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[grid_name] , identifier[visual_name] , identifier[key] ))
identifier[move] [ identifier[grid_axes] [ identifier[grid_name] ]]= identifier[visual_axes] [ identifier[visual_name] ]
identifier[fs] = identifier[_expand] ( identifier[f] , identifier[total_grid] . identifier[shape] [ identifier[grid_axes] [ identifier[normalize_axis] ]])
identifier[what_labels] =[]
keyword[if] identifier[grid] keyword[is] keyword[None] :
identifier[grid_of_grids] =[]
keyword[for] identifier[i] ,( identifier[binby] , identifier[limits] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[x] , identifier[xlimits] )):
identifier[grid_of_grids] . identifier[append] ([])
keyword[for] identifier[j] , identifier[what] keyword[in] identifier[enumerate] ( identifier[whats] ):
keyword[if] identifier[isinstance] ( identifier[what] , identifier[vaex] . identifier[stat] . identifier[Expression] ):
identifier[grid] = identifier[what] . identifier[calculate] ( identifier[self] , identifier[binby] = identifier[binby] , identifier[shape] = identifier[shape] , identifier[limits] = identifier[limits] , identifier[selection] = identifier[selections] , identifier[delay] = keyword[True] )
keyword[else] :
identifier[what] = identifier[what] . identifier[strip] ()
identifier[index] = identifier[what] . identifier[index] ( literal[string] )
keyword[import] identifier[re]
identifier[groups] = identifier[re] . identifier[match] ( literal[string] , identifier[what] ). identifier[groups] ()
keyword[if] identifier[groups] keyword[and] identifier[len] ( identifier[groups] )== literal[int] :
identifier[function] = identifier[groups] [ literal[int] ]
identifier[arguments] = identifier[groups] [ literal[int] ]. identifier[strip] ()
keyword[if] literal[string] keyword[in] identifier[arguments] :
identifier[arguments] = identifier[arguments] . identifier[split] ( literal[string] )
identifier[functions] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[unit_expression] = keyword[None]
keyword[if] identifier[function] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[unit_expression] = identifier[arguments]
keyword[if] identifier[function] keyword[in] [ literal[string] ]:
identifier[unit_expression] = literal[string] %( identifier[arguments] , identifier[arguments] )
keyword[if] identifier[function] keyword[in] [ literal[string] ]:
identifier[unit_expression] = literal[string] % identifier[arguments]
keyword[if] identifier[unit_expression] :
identifier[unit] = identifier[self] . identifier[unit] ( identifier[unit_expression] )
keyword[if] identifier[unit] :
identifier[what_units] = identifier[unit] . identifier[to_string] ( literal[string] )
keyword[if] identifier[function] keyword[in] identifier[functions] :
identifier[grid] = identifier[getattr] ( identifier[self] , identifier[function] )( identifier[arguments] , identifier[binby] = identifier[binby] , identifier[limits] = identifier[limits] , identifier[shape] = identifier[shape] , identifier[selection] = identifier[selections] , identifier[delay] = keyword[True] )
keyword[elif] identifier[function] == literal[string] :
identifier[grid] = identifier[self] . identifier[count] ( identifier[arguments] , identifier[binby] , identifier[shape] = identifier[shape] , identifier[limits] = identifier[limits] , identifier[selection] = identifier[selections] , identifier[delay] = keyword[True] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[function] , identifier[functions] ))
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[what] )
keyword[if] identifier[i] == literal[int] :
identifier[what_label] = identifier[str] ( identifier[whats] [ identifier[j] ])
keyword[if] identifier[what_units] :
identifier[what_label] += literal[string] % identifier[what_units]
keyword[if] identifier[fs] [ identifier[j] ]:
identifier[what_label] = identifier[fs] [ identifier[j] ]+ literal[string] + identifier[what_label]
identifier[what_labels] . identifier[append] ( identifier[what_label] )
identifier[grid_of_grids] [- literal[int] ]. identifier[append] ( identifier[grid] )
identifier[self] . identifier[executor] . identifier[execute] ()
keyword[for] identifier[i] ,( identifier[binby] , identifier[limits] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[x] , identifier[xlimits] )):
keyword[for] identifier[j] , identifier[what] keyword[in] identifier[enumerate] ( identifier[whats] ):
identifier[grid] = identifier[grid_of_grids] [ identifier[i] ][ identifier[j] ]. identifier[get] ()
identifier[total_grid] [ identifier[i] , identifier[j] ,:,:]= identifier[grid] [:, keyword[None] ,...]
identifier[labels] [ literal[string] ]= identifier[what_labels]
keyword[else] :
identifier[dims_left] = literal[int] - identifier[len] ( identifier[grid] . identifier[shape] )
identifier[total_grid] = identifier[np] . identifier[broadcast_to] ( identifier[grid] ,( literal[int] ,)* identifier[dims_left] + identifier[grid] . identifier[shape] )
keyword[def] identifier[_selection_name] ( identifier[name] ):
keyword[if] identifier[name] keyword[in] [ keyword[None] , keyword[False] ]:
keyword[return] literal[string]
keyword[elif] identifier[name] keyword[in] [ literal[string] , keyword[True] ]:
keyword[return] literal[string]
keyword[else] :
keyword[return] literal[string] % identifier[name]
keyword[if] identifier[selection_labels] keyword[is] keyword[None] :
identifier[labels] [ literal[string] ]= identifier[list] ([ identifier[_selection_name] ( identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[selections] ])
keyword[else] :
identifier[labels] [ literal[string] ]= identifier[selection_labels]
identifier[axes] =[ keyword[None] ]* identifier[len] ( identifier[move] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[move] . identifier[items] ():
identifier[axes] [ identifier[value] ]= identifier[key]
identifier[visual_grid] = identifier[np] . identifier[transpose] ( identifier[total_grid] , identifier[axes] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[total_grid] . identifier[shape] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[visual] . identifier[items] ())
identifier[logger] . identifier[debug] ( literal[string] , identifier[move] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[visual_grid] . identifier[shape] )
identifier[xexpressions] =[]
identifier[yexpressions] =[]
keyword[for] identifier[i] ,( identifier[binby] , identifier[limits] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[x] , identifier[xlimits] )):
identifier[xexpressions] . identifier[append] ( identifier[binby] [ literal[int] ])
identifier[yexpressions] . identifier[append] ( identifier[binby] [ literal[int] ])
keyword[if] identifier[xlabel] keyword[is] keyword[None] :
identifier[xlabels] =[]
identifier[ylabels] =[]
keyword[for] identifier[i] ,( identifier[binby] , identifier[limits] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[x] , identifier[xlimits] )):
keyword[if] identifier[z] keyword[is] keyword[not] keyword[None] :
identifier[xlabels] . identifier[append] ( identifier[self] . identifier[label] ( identifier[binby] [ literal[int] ]))
identifier[ylabels] . identifier[append] ( identifier[self] . identifier[label] ( identifier[binby] [ literal[int] ]))
keyword[else] :
identifier[xlabels] . identifier[append] ( identifier[self] . identifier[label] ( identifier[binby] [ literal[int] ]))
identifier[ylabels] . identifier[append] ( identifier[self] . identifier[label] ( identifier[binby] [ literal[int] ]))
keyword[else] :
identifier[Nl] = identifier[visual_grid] . identifier[shape] [ identifier[visual_axes] [ literal[string] ]]
identifier[xlabels] = identifier[_expand] ( identifier[xlabel] , identifier[Nl] )
identifier[ylabels] = identifier[_expand] ( identifier[ylabel] , identifier[Nl] )
identifier[labels] [ literal[string] ]= identifier[xlabels]
identifier[labels] [ literal[string] ]= identifier[ylabels]
identifier[axes] =[]
identifier[background_color] = identifier[np] . identifier[array] ( identifier[matplotlib] . identifier[colors] . identifier[colorConverter] . identifier[to_rgb] ( identifier[background_color] ))
keyword[import] identifier[math]
identifier[facet_columns] = keyword[None]
identifier[facets] = identifier[visual_grid] . identifier[shape] [ identifier[visual_axes] [ literal[string] ]]* identifier[visual_grid] . identifier[shape] [ identifier[visual_axes] [ literal[string] ]]
keyword[if] identifier[visual_grid] . identifier[shape] [ identifier[visual_axes] [ literal[string] ]]== literal[int] keyword[and] identifier[wrap] :
identifier[facet_columns] = identifier[min] ( identifier[wrap_columns] , identifier[visual_grid] . identifier[shape] [ identifier[visual_axes] [ literal[string] ]])
identifier[wrapped] = keyword[True]
keyword[elif] identifier[visual_grid] . identifier[shape] [ identifier[visual_axes] [ literal[string] ]]== literal[int] keyword[and] identifier[wrap] :
identifier[facet_columns] = identifier[min] ( identifier[wrap_columns] , identifier[visual_grid] . identifier[shape] [ identifier[visual_axes] [ literal[string] ]])
identifier[wrapped] = keyword[True]
keyword[else] :
identifier[wrapped] = keyword[False]
identifier[facet_columns] = identifier[visual_grid] . identifier[shape] [ identifier[visual_axes] [ literal[string] ]]
identifier[facet_rows] = identifier[int] ( identifier[math] . identifier[ceil] ( identifier[facets] / identifier[facet_columns] ))
identifier[logger] . identifier[debug] ( literal[string] , identifier[facet_rows] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[facet_columns] )
identifier[grid] = identifier[visual_grid] * literal[int]
identifier[fgrid] = identifier[visual_grid] * literal[int]
identifier[ngrid] = identifier[visual_grid] * literal[int]
identifier[vmins] = identifier[_expand] ( identifier[vmin] , identifier[visual_grid] . identifier[shape] [ identifier[visual_axes] [ identifier[visual] [ identifier[normalize_axis] ]]], identifier[type] = identifier[list] )
identifier[vmaxs] = identifier[_expand] ( identifier[vmax] , identifier[visual_grid] . identifier[shape] [ identifier[visual_axes] [ identifier[visual] [ identifier[normalize_axis] ]]], identifier[type] = identifier[list] )
identifier[visual_grid]
keyword[if] identifier[smooth_pre] :
identifier[grid] = identifier[vaex] . identifier[grids] . identifier[gf] ( identifier[grid] , identifier[smooth_pre] )
keyword[if] literal[int] :
identifier[axis] = identifier[visual_axes] [ identifier[visual] [ identifier[normalize_axis] ]]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[visual_grid] . identifier[shape] [ identifier[axis] ]):
identifier[item] =[ identifier[slice] ( keyword[None] , keyword[None] , keyword[None] ),]* identifier[len] ( identifier[visual_grid] . identifier[shape] )
identifier[item] [ identifier[axis] ]= identifier[i]
identifier[item] = identifier[tuple] ( identifier[item] )
identifier[f] = identifier[_parse_f] ( identifier[fs] [ identifier[i] ])
keyword[with] identifier[np] . identifier[errstate] ( identifier[divide] = literal[string] , identifier[invalid] = literal[string] ):
identifier[fgrid] . identifier[__setitem__] ( identifier[item] , identifier[f] ( identifier[grid] . identifier[__getitem__] ( identifier[item] )))
keyword[if] identifier[vmins] [ identifier[i] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[vmaxs] [ identifier[i] ] keyword[is] keyword[not] keyword[None] :
identifier[nsubgrid] = identifier[fgrid] . identifier[__getitem__] ( identifier[item] )* literal[int]
identifier[nsubgrid] -= identifier[vmins] [ identifier[i] ]
identifier[nsubgrid] /=( identifier[vmaxs] [ identifier[i] ]- identifier[vmins] [ identifier[i] ])
keyword[else] :
identifier[nsubgrid] , identifier[vmin] , identifier[vmax] = identifier[n] ( identifier[fgrid] . identifier[__getitem__] ( identifier[item] ))
identifier[vmins] [ identifier[i] ]= identifier[vmin]
identifier[vmaxs] [ identifier[i] ]= identifier[vmax]
identifier[ngrid] . identifier[__setitem__] ( identifier[item] , identifier[nsubgrid] )
keyword[if] literal[int] :
identifier[grid] = identifier[visual_grid] [ identifier[i] ]
identifier[f] = identifier[_parse_f] ( identifier[fs] [ identifier[i] ])
identifier[fgrid] = identifier[f] ( identifier[grid] )
identifier[finite_mask] = identifier[np] . identifier[isfinite] ( identifier[grid] )
identifier[finite_mask] = identifier[np] . identifier[any] ( identifier[finite_mask] , identifier[axis] = literal[int] )
keyword[if] identifier[vmin] keyword[is] keyword[not] keyword[None] keyword[and] identifier[vmax] keyword[is] keyword[not] keyword[None] :
identifier[ngrid] = identifier[fgrid] * literal[int]
identifier[ngrid] -= identifier[vmin]
identifier[ngrid] /=( identifier[vmax] - identifier[vmin] )
identifier[ngrid] = identifier[np] . identifier[clip] ( identifier[ngrid] , literal[int] , literal[int] )
keyword[else] :
identifier[ngrid] , identifier[vmin] , identifier[vmax] = identifier[n] ( identifier[fgrid] )
identifier[rows] , identifier[columns] = identifier[int] ( identifier[math] . identifier[ceil] ( identifier[facets] / identifier[float] ( identifier[facet_columns] ))), identifier[facet_columns]
identifier[colorbar_location] = literal[string]
keyword[if] identifier[visual] [ literal[string] ]== literal[string] keyword[and] identifier[visual_grid] . identifier[shape] [ literal[int] ]== identifier[facet_columns] :
identifier[colorbar_location] = literal[string]
keyword[if] identifier[visual] [ literal[string] ]== literal[string] keyword[and] identifier[visual_grid] . identifier[shape] [ literal[int] ]== identifier[facet_rows] :
identifier[colorbar_location] = literal[string]
identifier[logger] . identifier[debug] ( literal[string] , identifier[rows] , identifier[columns] )
keyword[import] identifier[matplotlib] . identifier[gridspec] keyword[as] identifier[gridspec]
identifier[column_scale] = literal[int]
identifier[row_scale] = literal[int]
identifier[row_offset] = literal[int]
keyword[if] identifier[facets] > literal[int] :
keyword[if] identifier[colorbar_location] == literal[string] :
identifier[column_scale] = literal[int]
identifier[gs] = identifier[gridspec] . identifier[GridSpec] ( identifier[rows] , identifier[columns] * identifier[column_scale] + literal[int] )
keyword[elif] identifier[colorbar_location] == literal[string] :
identifier[row_offset] = literal[int]
identifier[row_scale] = literal[int]
identifier[gs] = identifier[gridspec] . identifier[GridSpec] ( identifier[rows] * identifier[row_scale] + literal[int] , identifier[columns] )
keyword[else] :
identifier[gs] = identifier[gridspec] . identifier[GridSpec] ( identifier[rows] , identifier[columns] )
identifier[facet_index] = literal[int]
identifier[fs] = identifier[_expand] ( identifier[f] , identifier[len] ( identifier[whats] ))
identifier[colormaps] = identifier[_expand] ( identifier[colormap] , identifier[len] ( identifier[whats] ))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[visual_grid] . identifier[shape] [ literal[int] ]):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[visual_grid] . identifier[shape] [ literal[int] ]):
keyword[if] identifier[colorbar] keyword[and] identifier[colorbar_location] == literal[string] keyword[and] identifier[i] == literal[int] :
identifier[norm] = identifier[matplotlib] . identifier[colors] . identifier[Normalize] ( identifier[vmins] [ identifier[j] ], identifier[vmaxs] [ identifier[j] ])
identifier[sm] = identifier[matplotlib] . identifier[cm] . identifier[ScalarMappable] ( identifier[norm] , identifier[colormaps] [ identifier[j] ])
identifier[sm] . identifier[set_array] ( literal[int] )
keyword[if] identifier[facets] > literal[int] :
identifier[ax] = identifier[pylab] . identifier[subplot] ( identifier[gs] [ literal[int] , identifier[j] ])
identifier[colorbar] = identifier[fig] . identifier[colorbar] ( identifier[sm] , identifier[cax] = identifier[ax] , identifier[orientation] = literal[string] )
keyword[else] :
identifier[colorbar] = identifier[fig] . identifier[colorbar] ( identifier[sm] )
keyword[if] literal[string] keyword[in] identifier[labels] :
identifier[label] = identifier[labels] [ literal[string] ][ identifier[j] ]
keyword[if] identifier[facets] > literal[int] :
identifier[colorbar] . identifier[ax] . identifier[set_title] ( identifier[label] )
keyword[else] :
identifier[colorbar] . identifier[ax] . identifier[set_ylabel] ( identifier[colorbar_label] keyword[or] identifier[label] )
keyword[if] identifier[colorbar] keyword[and] identifier[colorbar_location] == literal[string] keyword[and] identifier[j] == literal[int] :
identifier[norm] = identifier[matplotlib] . identifier[colors] . identifier[Normalize] ( identifier[vmins] [ identifier[i] ], identifier[vmaxs] [ identifier[i] ])
identifier[sm] = identifier[matplotlib] . identifier[cm] . identifier[ScalarMappable] ( identifier[norm] , identifier[colormaps] [ identifier[i] ])
identifier[sm] . identifier[set_array] ( literal[int] )
keyword[if] identifier[facets] > literal[int] :
identifier[ax] = identifier[pylab] . identifier[subplot] ( identifier[gs] [ identifier[i] ,- literal[int] ])
identifier[colorbar] = identifier[fig] . identifier[colorbar] ( identifier[sm] , identifier[cax] = identifier[ax] )
keyword[else] :
identifier[colorbar] = identifier[fig] . identifier[colorbar] ( identifier[sm] )
identifier[label] = identifier[labels] [ literal[string] ][ identifier[i] ]
identifier[colorbar] . identifier[ax] . identifier[set_ylabel] ( identifier[colorbar_label] keyword[or] identifier[label] )
identifier[rgrid] = identifier[ngrid] [ identifier[i] , identifier[j] ]* literal[int]
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[rgrid] . identifier[shape] [ literal[int] ]):
keyword[for] identifier[l] keyword[in] identifier[range] ( identifier[rgrid] . identifier[shape] [ literal[int] ]):
keyword[if] identifier[smooth_post] keyword[is] keyword[not] keyword[None] :
identifier[rgrid] [ identifier[k] , identifier[l] ]= identifier[vaex] . identifier[grids] . identifier[gf] ( identifier[rgrid] , identifier[smooth_post] )
keyword[if] identifier[visual] [ literal[string] ]== literal[string] :
identifier[what_index] = identifier[j]
keyword[elif] identifier[visual] [ literal[string] ]== literal[string] :
identifier[what_index] = identifier[i]
keyword[else] :
identifier[what_index] = literal[int]
keyword[if] identifier[visual] [ identifier[normalize_axis] ]== literal[string] :
identifier[normalize_index] = identifier[j]
keyword[elif] identifier[visual] [ identifier[normalize_axis] ]== literal[string] :
identifier[normalize_index] = identifier[i]
keyword[else] :
identifier[normalize_index] = literal[int]
keyword[for] identifier[r] keyword[in] identifier[reduce] :
identifier[r] = identifier[_parse_reduction] ( identifier[r] , identifier[colormaps] [ identifier[what_index] ],[])
identifier[rgrid] = identifier[r] ( identifier[rgrid] )
identifier[row] = identifier[facet_index] // identifier[facet_columns]
identifier[column] = identifier[facet_index] % identifier[facet_columns]
keyword[if] identifier[colorbar] keyword[and] identifier[colorbar_location] == literal[string] :
identifier[norm] = identifier[matplotlib] . identifier[colors] . identifier[Normalize] ( identifier[vmins] [ identifier[normalize_index] ], identifier[vmaxs] [ identifier[normalize_index] ])
identifier[sm] = identifier[matplotlib] . identifier[cm] . identifier[ScalarMappable] ( identifier[norm] , identifier[colormaps] [ identifier[what_index] ])
identifier[sm] . identifier[set_array] ( literal[int] )
keyword[if] identifier[facets] > literal[int] :
identifier[ax] = identifier[pylab] . identifier[subplot] ( identifier[gs] [ identifier[row] , identifier[column] ])
identifier[colorbar] = identifier[fig] . identifier[colorbar] ( identifier[sm] , identifier[ax] = identifier[ax] )
keyword[else] :
identifier[colorbar] = identifier[fig] . identifier[colorbar] ( identifier[sm] )
identifier[label] = identifier[labels] [ literal[string] ][ identifier[what_index] ]
identifier[colorbar] . identifier[ax] . identifier[set_ylabel] ( identifier[colorbar_label] keyword[or] identifier[label] )
keyword[if] identifier[facets] > literal[int] :
identifier[ax] = identifier[pylab] . identifier[subplot] ( identifier[gs] [ identifier[row_offset] + identifier[row] * identifier[row_scale] : identifier[row_offset] +( identifier[row] + literal[int] )* identifier[row_scale] , identifier[column] * identifier[column_scale] :( identifier[column] + literal[int] )* identifier[column_scale] ])
keyword[else] :
identifier[ax] = identifier[pylab] . identifier[gca] ()
identifier[axes] . identifier[append] ( identifier[ax] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[rgrid] . identifier[shape] )
identifier[plot_rgrid] = identifier[rgrid]
keyword[assert] identifier[plot_rgrid] . identifier[shape] [ literal[int] ]== literal[int] , literal[string]
identifier[plot_rgrid] = identifier[plot_rgrid] [:, literal[int] ]
keyword[if] identifier[plot_rgrid] . identifier[shape] [ literal[int] ]> literal[int] :
identifier[plot_rgrid] = identifier[vaex] . identifier[image] . identifier[fade] ( identifier[plot_rgrid] [::- literal[int] ])
keyword[else] :
identifier[plot_rgrid] = identifier[plot_rgrid] [ literal[int] ]
identifier[extend] = keyword[None]
keyword[if] identifier[visual] [ literal[string] ]== literal[string] :
identifier[subplot_index] = identifier[i]
keyword[elif] identifier[visual] [ literal[string] ]== literal[string] :
identifier[subplot_index] = identifier[j]
keyword[else] :
identifier[subplot_index] = literal[int]
identifier[extend] = identifier[np] . identifier[array] ( identifier[xlimits] [ identifier[subplot_index] ][- literal[int] :]). identifier[flatten] ()
identifier[logger] . identifier[debug] ( literal[string] , identifier[plot_rgrid] . identifier[shape] )
identifier[plot_rgrid] = identifier[np] . identifier[transpose] ( identifier[plot_rgrid] ,( literal[int] , literal[int] , literal[int] ))
identifier[im] = identifier[ax] . identifier[imshow] ( identifier[plot_rgrid] , identifier[extent] = identifier[extend] . identifier[tolist] (), identifier[origin] = literal[string] , identifier[aspect] = identifier[aspect] , identifier[interpolation] = identifier[interpolation] )
keyword[def] identifier[label] ( identifier[index] , identifier[label] , identifier[expression] ):
keyword[if] identifier[label] keyword[and] identifier[_issequence] ( identifier[label] ):
keyword[return] identifier[label] [ identifier[i] ]
keyword[else] :
keyword[return] identifier[self] . identifier[label] ( identifier[expression] )
keyword[if] identifier[visual_reverse] [ literal[string] ]== literal[string] :
identifier[labelsx] = identifier[labels] [ literal[string] ]
identifier[pylab] . identifier[xlabel] ( identifier[labelsx] [ identifier[subplot_index] ])
keyword[if] identifier[visual_reverse] [ literal[string] ]== literal[string] :
identifier[labelsy] = identifier[labels] [ literal[string] ]
identifier[pylab] . identifier[ylabel] ( identifier[labelsy] [ identifier[subplot_index] ])
keyword[if] identifier[visual] [ literal[string] ] keyword[in] [ literal[string] ]:
identifier[labelsz] = identifier[labels] [ literal[string] ]
identifier[ax] . identifier[set_title] ( identifier[labelsz] [ identifier[i] ])
keyword[if] identifier[visual] [ literal[string] ] keyword[in] [ literal[string] ]:
identifier[labelsz] = identifier[labels] [ literal[string] ]
identifier[ax] . identifier[set_title] ( identifier[labelsz] [ identifier[j] ])
identifier[max_labels] = literal[int]
identifier[facet_index] += literal[int]
keyword[if] identifier[title] :
identifier[fig] . identifier[suptitle] ( identifier[title] , identifier[fontsize] = literal[string] )
keyword[if] identifier[tight_layout] :
keyword[if] identifier[title] :
identifier[pylab] . identifier[tight_layout] ( identifier[rect] =[ literal[int] , literal[int] , literal[int] , literal[int] ])
keyword[else] :
identifier[pylab] . identifier[tight_layout] ()
keyword[if] identifier[hardcopy] :
identifier[pylab] . identifier[savefig] ( identifier[hardcopy] )
keyword[if] identifier[show] :
identifier[pylab] . identifier[show] ()
keyword[if] identifier[return_extra] :
keyword[return] identifier[im] , identifier[grid] , identifier[fgrid] , identifier[ngrid] , identifier[rgrid]
keyword[else] :
keyword[return] identifier[im]
|
def plot(self, x=None, y=None, z=None, what='count(*)', vwhat=None, reduce=['colormap'], f=None, normalize='normalize', normalize_axis='what', vmin=None, vmax=None, shape=256, vshape=32, limits=None, grid=None, colormap='afmhot', figsize=None, xlabel=None, ylabel=None, aspect='auto', tight_layout=True, interpolation='nearest', show=False, colorbar=True, colorbar_label=None, selection=None, selection_labels=None, title=None, background_color='white', pre_blend=False, background_alpha=1.0, visual=dict(x='x', y='y', layer='z', fade='selection', row='subspace', column='what'), smooth_pre=None, smooth_post=None, wrap=True, wrap_columns=4, return_extra=False, hardcopy=None): # colors=["red", "green", "blue"],
"Viz data in a 2d histogram/heatmap.\n\n Declarative plotting of statistical plots using matplotlib, supports subplots, selections, layers.\n\n Instead of passing x and y, pass a list as x argument for multiple panels. Give what a list of options to have multiple\n panels. When both are present then will be origanized in a column/row order.\n\n This methods creates a 6 dimensional 'grid', where each dimension can map the a visual dimension.\n The grid dimensions are:\n\n * x: shape determined by shape, content by x argument or the first dimension of each space\n * y: ,,\n * z: related to the z argument\n * selection: shape equals length of selection argument\n * what: shape equals length of what argument\n * space: shape equals length of x argument if multiple values are given\n\n By default, this its shape is (1, 1, 1, 1, shape, shape) (where x is the last dimension)\n\n The visual dimensions are\n\n * x: x coordinate on a plot / image (default maps to grid's x)\n * y: y ,, (default maps to grid's y)\n * layer: each image in this dimension is blended togeher to one image (default maps to z)\n * fade: each image is shown faded after the next image (default mapt to selection)\n * row: rows of subplots (default maps to space)\n * columns: columns of subplot (default maps to what)\n\n All these mappings can be changes by the visual argument, some examples:\n\n >>> df.plot('x', 'y', what=['mean(x)', 'correlation(vx, vy)'])\n\n Will plot each 'what' as a column.\n\n >>> df.plot('x', 'y', selection=['FeH < -3', '(FeH >= -3) & (FeH < -2)'], visual=dict(column='selection'))\n\n Will plot each selection as a column, instead of a faded on top of each other.\n\n\n\n\n\n :param x: Expression to bin in the x direction (by default maps to x), or list of pairs, like [['x', 'y'], ['x', 'z']], if multiple pairs are given, this dimension maps to rows by default\n :param y: y (by default maps to y)\n :param z: Expression to bin in the z direction, followed by a :start,end,shape 
signature, like 'FeH:-3,1:5' will produce 5 layers between -10 and 10 (by default maps to layer)\n :param what: What to plot, count(*) will show a N-d histogram, mean('x'), the mean of the x column, sum('x') the sum, std('x') the standard deviation, correlation('vx', 'vy') the correlation coefficient. Can also be a list of values, like ['count(x)', std('vx')], (by default maps to column)\n :param reduce:\n :param f: transform values by: 'identity' does nothing 'log' or 'log10' will show the log of the value\n :param normalize: normalization function, currently only 'normalize' is supported\n :param normalize_axis: which axes to normalize on, None means normalize by the global maximum.\n :param vmin: instead of automatic normalization, (using normalize and normalization_axis) scale the data between vmin and vmax to [0, 1]\n :param vmax: see vmin\n :param shape: shape/size of the n-D histogram grid\n :param limits: list of [[xmin, xmax], [ymin, ymax]], or a description such as 'minmax', '99%'\n :param grid: if the binning is done before by yourself, you can pass it\n :param colormap: matplotlib colormap to use\n :param figsize: (x, y) tuple passed to pylab.figure for setting the figure size\n :param xlabel:\n :param ylabel:\n :param aspect:\n :param tight_layout: call pylab.tight_layout or not\n :param colorbar: plot a colorbar or not\n :param interpolation: interpolation for imshow, possible options are: 'nearest', 'bilinear', 'bicubic', see matplotlib for more\n :param return_extra:\n :return:\n "
import pylab
import matplotlib
n = _parse_n(normalize)
if type(shape) == int:
shape = (shape,) * 2 # depends on [control=['if'], data=[]]
binby = []
x = _ensure_strings_from_expressions(x)
y = _ensure_strings_from_expressions(y)
for expression in [y, x]:
if expression is not None:
binby = [expression] + binby # depends on [control=['if'], data=['expression']] # depends on [control=['for'], data=['expression']]
fig = pylab.gcf()
if figsize is not None:
fig.set_size_inches(*figsize) # depends on [control=['if'], data=['figsize']]
import re
what_units = None
whats = _ensure_list(what)
selections = _ensure_list(selection)
selections = _ensure_strings_from_expressions(selections)
if y is None:
(waslist, [x]) = vaex.utils.listify(x) # depends on [control=['if'], data=[]]
else:
(waslist, [x, y]) = vaex.utils.listify(x, y)
x = list(zip(x, y))
limits = [limits]
# every plot has its own vwhat for now
vwhats = _expand_limits(vwhat, len(x)) # TODO: we're abusing this function..
logger.debug('x: %s', x)
(limits, shape) = self.limits(x, limits, shape=shape)
shape = shape[0]
logger.debug('limits: %r', limits)
# mapping of a grid axis to a label
labels = {}
shape = _expand_shape(shape, 2)
vshape = _expand_shape(shape, 2)
if z is not None:
match = re.match('(.*):(.*),(.*),(.*)', z)
if match:
groups = match.groups()
import ast
z_expression = groups[0]
logger.debug('found groups: %r', list(groups))
z_limits = [ast.literal_eval(groups[1]), ast.literal_eval(groups[2])]
z_shape = ast.literal_eval(groups[3])
# for pair in x:
x = [[z_expression] + list(k) for k in x]
limits = np.array([[z_limits] + list(k) for k in limits])
shape = (z_shape,) + shape
vshape = (z_shape,) + vshape
logger.debug('x = %r', x)
values = np.linspace(z_limits[0], z_limits[1], num=z_shape + 1)
labels['z'] = list(['%s <= %s < %s' % (v1, z_expression, v2) for (v1, v2) in zip(values[:-1], values[1:])]) # depends on [control=['if'], data=[]]
else:
raise ValueError("Could not understand 'z' argument %r, expected something in form: 'column:-1,10:5'" % facet) # depends on [control=['if'], data=['z']]
else:
z_shape = 1
# z == 1
if z is None:
total_grid = np.zeros((len(x), len(whats), len(selections), 1) + shape, dtype=float)
total_vgrid = np.zeros((len(x), len(whats), len(selections), 1) + vshape, dtype=float) # depends on [control=['if'], data=[]]
else:
total_grid = np.zeros((len(x), len(whats), len(selections)) + shape, dtype=float)
total_vgrid = np.zeros((len(x), len(whats), len(selections)) + vshape, dtype=float)
logger.debug('shape of total grid: %r', total_grid.shape)
axis = dict(plot=0, what=1, selection=2)
xlimits = limits
grid_axes = dict(x=-1, y=-2, z=-3, selection=-4, what=-5, subspace=-6)
visual_axes = dict(x=-1, y=-2, layer=-3, fade=-4, column=-5, row=-6)
# visual_default=dict(x="x", y="y", z="layer", selection="fade", subspace="row", what="column")
# visual: mapping of a plot axis, to a grid axis
visual_default = dict(x='x', y='y', layer='z', fade='selection', row='subspace', column='what')
def invert(x):
return dict(((v, k) for (k, v) in x.items()))
# visual_default_reverse = invert(visual_default)
# visual_ = visual_default
# visual = dict(visual) # copy for modification
# add entries to avoid mapping multiple times to the same axis
free_visual_axes = list(visual_default.keys())
# visual_reverse = invert(visual)
logger.debug('1: %r %r', visual, free_visual_axes)
for (visual_name, grid_name) in visual.items():
if visual_name in free_visual_axes:
free_visual_axes.remove(visual_name) # depends on [control=['if'], data=['visual_name', 'free_visual_axes']]
else:
raise ValueError('visual axes %s used multiple times' % visual_name) # depends on [control=['for'], data=[]]
logger.debug('2: %r %r', visual, free_visual_axes)
for (visual_name, grid_name) in visual_default.items():
if visual_name in free_visual_axes and grid_name not in visual.values():
free_visual_axes.remove(visual_name)
visual[visual_name] = grid_name # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
logger.debug('3: %r %r', visual, free_visual_axes)
for (visual_name, grid_name) in visual_default.items():
if visual_name not in free_visual_axes and grid_name not in visual.values():
visual[free_visual_axes.pop(0)] = grid_name # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
logger.debug('4: %r %r', visual, free_visual_axes)
visual_reverse = invert(visual)
# TODO: the meaning of visual and visual_reverse is changed below this line, super confusing
(visual, visual_reverse) = (visual_reverse, visual)
# so now, visual: mapping of a grid axis to plot axis
# visual_reverse: mapping of a grid axis to plot axis
move = {}
for (grid_name, visual_name) in visual.items():
if visual_axes[visual_name] in visual.values():
index = visual.values().find(visual_name)
key = visual.keys()[index]
raise ValueError('trying to map %s to %s while, it is already mapped by %s' % (grid_name, visual_name, key)) # depends on [control=['if'], data=[]]
move[grid_axes[grid_name]] = visual_axes[visual_name] # depends on [control=['for'], data=[]]
# normalize_axis = _ensure_list(normalize_axis)
fs = _expand(f, total_grid.shape[grid_axes[normalize_axis]])
# assert len(vwhat)
# labels["y"] = ylabels
what_labels = []
if grid is None:
grid_of_grids = []
for (i, (binby, limits)) in enumerate(zip(x, xlimits)):
grid_of_grids.append([])
for (j, what) in enumerate(whats):
if isinstance(what, vaex.stat.Expression):
grid = what.calculate(self, binby=binby, shape=shape, limits=limits, selection=selections, delay=True) # depends on [control=['if'], data=[]]
else:
what = what.strip()
index = what.index('(')
import re
groups = re.match('(.*)\\((.*)\\)', what).groups()
if groups and len(groups) == 2:
function = groups[0]
arguments = groups[1].strip()
if ',' in arguments:
arguments = arguments.split(',') # depends on [control=['if'], data=['arguments']]
functions = ['mean', 'sum', 'std', 'var', 'correlation', 'covar', 'min', 'max', 'median_approx']
unit_expression = None
if function in ['mean', 'sum', 'std', 'min', 'max', 'median']:
unit_expression = arguments # depends on [control=['if'], data=[]]
if function in ['var']:
unit_expression = '(%s) * (%s)' % (arguments, arguments) # depends on [control=['if'], data=[]]
if function in ['covar']:
unit_expression = '(%s) * (%s)' % arguments # depends on [control=['if'], data=[]]
if unit_expression:
unit = self.unit(unit_expression)
if unit:
what_units = unit.to_string('latex_inline') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if function in functions:
grid = getattr(self, function)(arguments, binby=binby, limits=limits, shape=shape, selection=selections, delay=True) # depends on [control=['if'], data=['function']]
elif function == 'count':
grid = self.count(arguments, binby, shape=shape, limits=limits, selection=selections, delay=True) # depends on [control=['if'], data=[]]
else:
raise ValueError("Could not understand method: %s, expected one of %r'" % (function, functions)) # depends on [control=['if'], data=[]]
else:
raise ValueError("Could not understand 'what' argument %r, expected something in form: 'count(*)', 'mean(x)'" % what)
if i == 0: # and j == 0:
what_label = str(whats[j])
if what_units:
what_label += ' (%s)' % what_units # depends on [control=['if'], data=[]]
if fs[j]:
what_label = fs[j] + ' ' + what_label # depends on [control=['if'], data=[]]
what_labels.append(what_label) # depends on [control=['if'], data=[]]
grid_of_grids[-1].append(grid) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
self.executor.execute()
for (i, (binby, limits)) in enumerate(zip(x, xlimits)):
for (j, what) in enumerate(whats):
grid = grid_of_grids[i][j].get()
total_grid[i, j, :, :] = grid[:, None, ...] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
labels['what'] = what_labels # depends on [control=['if'], data=['grid']]
else:
dims_left = 6 - len(grid.shape)
total_grid = np.broadcast_to(grid, (1,) * dims_left + grid.shape)
# visual=dict(x="x", y="y", selection="fade", subspace="facet1", what="facet2",)
def _selection_name(name):
if name in [None, False]:
return 'selection: all' # depends on [control=['if'], data=[]]
elif name in ['default', True]:
return 'selection: default' # depends on [control=['if'], data=[]]
else:
return 'selection: %s' % name
if selection_labels is None:
labels['selection'] = list([_selection_name(k) for k in selections]) # depends on [control=['if'], data=[]]
else:
labels['selection'] = selection_labels
# visual_grid = np.moveaxis(total_grid, move.keys(), move.values())
# np.moveaxis is in np 1.11 only?, use transpose
axes = [None] * len(move)
for (key, value) in move.items():
axes[value] = key # depends on [control=['for'], data=[]]
visual_grid = np.transpose(total_grid, axes)
logger.debug('grid shape: %r', total_grid.shape)
logger.debug('visual: %r', visual.items())
logger.debug('move: %r', move)
logger.debug('visual grid shape: %r', visual_grid.shape)
xexpressions = []
yexpressions = []
for (i, (binby, limits)) in enumerate(zip(x, xlimits)):
xexpressions.append(binby[0])
yexpressions.append(binby[1]) # depends on [control=['for'], data=[]]
if xlabel is None:
xlabels = []
ylabels = []
for (i, (binby, limits)) in enumerate(zip(x, xlimits)):
if z is not None:
xlabels.append(self.label(binby[1]))
ylabels.append(self.label(binby[2])) # depends on [control=['if'], data=[]]
else:
xlabels.append(self.label(binby[0]))
ylabels.append(self.label(binby[1])) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
Nl = visual_grid.shape[visual_axes['row']]
xlabels = _expand(xlabel, Nl)
ylabels = _expand(ylabel, Nl)
#labels[visual["x"]] = (xlabels, ylabels)
labels['x'] = xlabels
labels['y'] = ylabels
# grid = total_grid
# print(grid.shape)
# grid = self.reduce(grid, )
axes = []
# cax = pylab.subplot(1,1,1)
background_color = np.array(matplotlib.colors.colorConverter.to_rgb(background_color))
# if grid.shape[axis["selection"]] > 1:# and not facet:
# rgrid = vaex.image.fade(rgrid)
# finite_mask = np.any(finite_mask, axis=0) # do we really need this
# print(rgrid.shape)
# facet_row_axis = axis["what"]
import math
facet_columns = None
facets = visual_grid.shape[visual_axes['row']] * visual_grid.shape[visual_axes['column']]
if visual_grid.shape[visual_axes['column']] == 1 and wrap:
facet_columns = min(wrap_columns, visual_grid.shape[visual_axes['row']])
wrapped = True # depends on [control=['if'], data=[]]
elif visual_grid.shape[visual_axes['row']] == 1 and wrap:
facet_columns = min(wrap_columns, visual_grid.shape[visual_axes['column']])
wrapped = True # depends on [control=['if'], data=[]]
else:
wrapped = False
facet_columns = visual_grid.shape[visual_axes['column']]
facet_rows = int(math.ceil(facets / facet_columns))
logger.debug('facet_rows: %r', facet_rows)
logger.debug('facet_columns: %r', facet_columns)
# if visual_grid.shape[visual_axes["row"]] > 1: # and not wrap:
# #facet_row_axis = axis["what"]
# facet_columns = visual_grid.shape[visual_axes["column"]]
# else:
# facet_columns = min(wrap_columns, facets)
# if grid.shape[axis["plot"]] > 1:# and not facet:
# this loop could be done using axis arguments everywhere
# assert len(normalize_axis) == 1, "currently only 1 normalization axis supported"
grid = visual_grid * 1.0
fgrid = visual_grid * 1.0
ngrid = visual_grid * 1.0
# colorgrid = np.zeros(ngrid.shape + (4,), float)
# print "norma", normalize_axis, visual_grid.shape[visual_axes[visual[normalize_axis]]]
vmins = _expand(vmin, visual_grid.shape[visual_axes[visual[normalize_axis]]], type=list)
vmaxs = _expand(vmax, visual_grid.shape[visual_axes[visual[normalize_axis]]], type=list)
# for name in normalize_axis:
visual_grid
if smooth_pre:
grid = vaex.grids.gf(grid, smooth_pre) # depends on [control=['if'], data=[]]
if 1:
axis = visual_axes[visual[normalize_axis]]
for i in range(visual_grid.shape[axis]):
item = [slice(None, None, None)] * len(visual_grid.shape)
item[axis] = i
item = tuple(item)
f = _parse_f(fs[i])
with np.errstate(divide='ignore', invalid='ignore'): # these are fine, we are ok with nan's in vaex
fgrid.__setitem__(item, f(grid.__getitem__(item))) # depends on [control=['with'], data=[]]
# print vmins[i], vmaxs[i]
if vmins[i] is not None and vmaxs[i] is not None:
nsubgrid = fgrid.__getitem__(item) * 1
nsubgrid -= vmins[i]
nsubgrid /= vmaxs[i] - vmins[i] # depends on [control=['if'], data=[]]
else:
(nsubgrid, vmin, vmax) = n(fgrid.__getitem__(item))
vmins[i] = vmin
vmaxs[i] = vmax
# print " ", vmins[i], vmaxs[i]
ngrid.__setitem__(item, nsubgrid) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if 0: # TODO: above should be like the code below, with custom vmin and vmax
grid = visual_grid[i]
f = _parse_f(fs[i])
fgrid = f(grid)
finite_mask = np.isfinite(grid)
finite_mask = np.any(finite_mask, axis=0)
if vmin is not None and vmax is not None:
ngrid = fgrid * 1
ngrid -= vmin
ngrid /= vmax - vmin
ngrid = np.clip(ngrid, 0, 1) # depends on [control=['if'], data=[]]
else:
(ngrid, vmin, vmax) = n(fgrid) # depends on [control=['if'], data=[]]
# vmin, vmax = np.nanmin(fgrid), np.nanmax(fgrid)
# every 'what', should have its own colorbar, check if what corresponds to
# rows or columns in facets, if so, do a colorbar per row or per column
(rows, columns) = (int(math.ceil(facets / float(facet_columns))), facet_columns)
colorbar_location = 'individual'
if visual['what'] == 'row' and visual_grid.shape[1] == facet_columns:
colorbar_location = 'per_row' # depends on [control=['if'], data=[]]
if visual['what'] == 'column' and visual_grid.shape[0] == facet_rows:
colorbar_location = 'per_column' # depends on [control=['if'], data=[]]
# values = np.linspace(facet_limits[0], facet_limits[1], facet_count+1)
logger.debug('rows: %r, columns: %r', rows, columns)
import matplotlib.gridspec as gridspec
column_scale = 1
row_scale = 1
row_offset = 0
if facets > 1:
if colorbar_location == 'per_row':
column_scale = 4
gs = gridspec.GridSpec(rows, columns * column_scale + 1) # depends on [control=['if'], data=[]]
elif colorbar_location == 'per_column':
row_offset = 1
row_scale = 4
gs = gridspec.GridSpec(rows * row_scale + 1, columns) # depends on [control=['if'], data=[]]
else:
gs = gridspec.GridSpec(rows, columns) # depends on [control=['if'], data=[]]
facet_index = 0
fs = _expand(f, len(whats))
colormaps = _expand(colormap, len(whats))
# row
for i in range(visual_grid.shape[0]):
# column
for j in range(visual_grid.shape[1]):
if colorbar and colorbar_location == 'per_column' and (i == 0):
norm = matplotlib.colors.Normalize(vmins[j], vmaxs[j])
sm = matplotlib.cm.ScalarMappable(norm, colormaps[j])
sm.set_array(1) # make matplotlib happy (strange behavious)
if facets > 1:
ax = pylab.subplot(gs[0, j])
colorbar = fig.colorbar(sm, cax=ax, orientation='horizontal') # depends on [control=['if'], data=[]]
else:
colorbar = fig.colorbar(sm)
if 'what' in labels:
label = labels['what'][j]
if facets > 1:
colorbar.ax.set_title(label) # depends on [control=['if'], data=[]]
else:
colorbar.ax.set_ylabel(colorbar_label or label) # depends on [control=['if'], data=['labels']] # depends on [control=['if'], data=[]]
if colorbar and colorbar_location == 'per_row' and (j == 0):
norm = matplotlib.colors.Normalize(vmins[i], vmaxs[i])
sm = matplotlib.cm.ScalarMappable(norm, colormaps[i])
sm.set_array(1) # make matplotlib happy (strange behavious)
if facets > 1:
ax = pylab.subplot(gs[i, -1])
colorbar = fig.colorbar(sm, cax=ax) # depends on [control=['if'], data=[]]
else:
colorbar = fig.colorbar(sm)
label = labels['what'][i]
colorbar.ax.set_ylabel(colorbar_label or label) # depends on [control=['if'], data=[]]
rgrid = ngrid[i, j] * 1.0
# print rgrid.shape
for k in range(rgrid.shape[0]):
for l in range(rgrid.shape[0]):
if smooth_post is not None:
rgrid[k, l] = vaex.grids.gf(rgrid, smooth_post) # depends on [control=['if'], data=['smooth_post']] # depends on [control=['for'], data=['l']] # depends on [control=['for'], data=['k']]
if visual['what'] == 'column':
what_index = j # depends on [control=['if'], data=[]]
elif visual['what'] == 'row':
what_index = i # depends on [control=['if'], data=[]]
else:
what_index = 0
if visual[normalize_axis] == 'column':
normalize_index = j # depends on [control=['if'], data=[]]
elif visual[normalize_axis] == 'row':
normalize_index = i # depends on [control=['if'], data=[]]
else:
normalize_index = 0
for r in reduce:
r = _parse_reduction(r, colormaps[what_index], [])
rgrid = r(rgrid) # depends on [control=['for'], data=['r']]
row = facet_index // facet_columns
column = facet_index % facet_columns
if colorbar and colorbar_location == 'individual':
# visual_grid.shape[visual_axes[visual[normalize_axis]]]
norm = matplotlib.colors.Normalize(vmins[normalize_index], vmaxs[normalize_index])
sm = matplotlib.cm.ScalarMappable(norm, colormaps[what_index])
sm.set_array(1) # make matplotlib happy (strange behavious)
if facets > 1:
ax = pylab.subplot(gs[row, column])
colorbar = fig.colorbar(sm, ax=ax) # depends on [control=['if'], data=[]]
else:
colorbar = fig.colorbar(sm)
label = labels['what'][what_index]
colorbar.ax.set_ylabel(colorbar_label or label) # depends on [control=['if'], data=[]]
if facets > 1:
ax = pylab.subplot(gs[row_offset + row * row_scale:row_offset + (row + 1) * row_scale, column * column_scale:(column + 1) * column_scale]) # depends on [control=['if'], data=[]]
else:
ax = pylab.gca()
axes.append(ax)
logger.debug('rgrid: %r', rgrid.shape)
plot_rgrid = rgrid
assert plot_rgrid.shape[1] == 1, 'no layers supported yet'
plot_rgrid = plot_rgrid[:, 0]
if plot_rgrid.shape[0] > 1:
plot_rgrid = vaex.image.fade(plot_rgrid[::-1]) # depends on [control=['if'], data=[]]
else:
plot_rgrid = plot_rgrid[0]
extend = None
if visual['subspace'] == 'row':
subplot_index = i # depends on [control=['if'], data=[]]
elif visual['subspace'] == 'column':
subplot_index = j # depends on [control=['if'], data=[]]
else:
subplot_index = 0
extend = np.array(xlimits[subplot_index][-2:]).flatten()
# extend = np.array(xlimits[i]).flatten()
logger.debug('plot rgrid: %r', plot_rgrid.shape)
plot_rgrid = np.transpose(plot_rgrid, (1, 0, 2))
im = ax.imshow(plot_rgrid, extent=extend.tolist(), origin='lower', aspect=aspect, interpolation=interpolation)
# v1, v2 = values[i], values[i+1]
def label(index, label, expression):
if label and _issequence(label):
return label[i] # depends on [control=['if'], data=[]]
else:
return self.label(expression)
if visual_reverse['x'] == 'x':
labelsx = labels['x']
pylab.xlabel(labelsx[subplot_index]) # depends on [control=['if'], data=[]]
if visual_reverse['x'] == 'x':
labelsy = labels['y']
pylab.ylabel(labelsy[subplot_index]) # depends on [control=['if'], data=[]]
if visual['z'] in ['row']:
labelsz = labels['z']
ax.set_title(labelsz[i]) # depends on [control=['if'], data=[]]
if visual['z'] in ['column']:
labelsz = labels['z']
ax.set_title(labelsz[j]) # depends on [control=['if'], data=[]]
max_labels = 10
# xexpression = xexpressions[i]
# if self.iscategory(xexpression):
# labels = self.category_labels(xexpression)
# step = len(labels) // max_labels
# pylab.xticks(np.arange(len(labels))[::step], labels[::step], size='small')
# yexpression = yexpressions[i]
# if self.iscategory(yexpression):
# labels = self.category_labels(yexpression)
# step = len(labels) // max_labels
# pylab.yticks(np.arange(len(labels))[::step], labels[::step], size='small')
facet_index += 1 # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
if title:
fig.suptitle(title, fontsize='x-large') # depends on [control=['if'], data=[]]
if tight_layout:
if title:
pylab.tight_layout(rect=[0, 0.03, 1, 0.95]) # depends on [control=['if'], data=[]]
else:
pylab.tight_layout() # depends on [control=['if'], data=[]]
if hardcopy:
pylab.savefig(hardcopy) # depends on [control=['if'], data=[]]
if show:
pylab.show() # depends on [control=['if'], data=[]]
if return_extra:
return (im, grid, fgrid, ngrid, rgrid) # depends on [control=['if'], data=[]]
else:
return im
|
def fetch_open_data(cls, ifo, start, end, sample_rate=4096,
tag=None, version=None,
format='hdf5', host=GWOSC_DEFAULT_HOST,
verbose=False, cache=None, **kwargs):
"""Fetch open-access data from the LIGO Open Science Center
Parameters
----------
ifo : `str`
the two-character prefix of the IFO in which you are interested,
e.g. `'L1'`
start : `~gwpy.time.LIGOTimeGPS`, `float`, `str`, optional
GPS start time of required data, defaults to start of data found;
any input parseable by `~gwpy.time.to_gps` is fine
end : `~gwpy.time.LIGOTimeGPS`, `float`, `str`, optional
GPS end time of required data, defaults to end of data found;
any input parseable by `~gwpy.time.to_gps` is fine
sample_rate : `float`, optional,
the sample rate of desired data; most data are stored
by LOSC at 4096 Hz, however there may be event-related
data releases with a 16384 Hz rate, default: `4096`
tag : `str`, optional
file tag, e.g. ``'CLN'`` to select cleaned data, or ``'C00'``
for 'raw' calibrated data.
version : `int`, optional
version of files to download, defaults to highest discovered
version
format : `str`, optional
the data format to download and parse, default: ``'h5py'``
- ``'hdf5'``
- ``'gwf'`` - requires |LDAStools.frameCPP|_
host : `str`, optional
HTTP host name of LOSC server to access
verbose : `bool`, optional, default: `False`
print verbose output while fetching data
cache : `bool`, optional
save/read a local copy of the remote URL, default: `False`;
useful if the same remote data are to be accessed multiple times.
Set `GWPY_CACHE=1` in the environment to auto-cache.
**kwargs
any other keyword arguments are passed to the `TimeSeries.read`
method that parses the file that was downloaded
Examples
--------
>>> from gwpy.timeseries import (TimeSeries, StateVector)
>>> print(TimeSeries.fetch_open_data('H1', 1126259446, 1126259478))
TimeSeries([ 2.17704028e-19, 2.08763900e-19, 2.39681183e-19,
..., 3.55365541e-20, 6.33533516e-20,
7.58121195e-20]
unit: Unit(dimensionless),
t0: 1126259446.0 s,
dt: 0.000244140625 s,
name: Strain,
channel: None)
>>> print(StateVector.fetch_open_data('H1', 1126259446, 1126259478))
StateVector([127,127,127,127,127,127,127,127,127,127,127,127,
127,127,127,127,127,127,127,127,127,127,127,127,
127,127,127,127,127,127,127,127]
unit: Unit(dimensionless),
t0: 1126259446.0 s,
dt: 1.0 s,
name: Data quality,
channel: None,
bits: Bits(0: data present
1: passes cbc CAT1 test
2: passes cbc CAT2 test
3: passes cbc CAT3 test
4: passes burst CAT1 test
5: passes burst CAT2 test
6: passes burst CAT3 test,
channel=None,
epoch=1126259446.0))
For the `StateVector`, the naming of the bits will be
``format``-dependent, because they are recorded differently by LOSC
in different formats.
For events published in O2 and later, LOSC typically provides
multiple data sets containing the original (``'C00'``) and cleaned
(``'CLN'``) data.
To select both data sets and plot a comparison, for example:
>>> orig = TimeSeries.fetch_open_data('H1', 1187008870, 1187008896,
... tag='C00')
>>> cln = TimeSeries.fetch_open_data('H1', 1187008870, 1187008896,
... tag='CLN')
>>> origasd = orig.asd(fftlength=4, overlap=2)
>>> clnasd = cln.asd(fftlength=4, overlap=2)
>>> plot = origasd.plot(label='Un-cleaned')
>>> ax = plot.gca()
>>> ax.plot(clnasd, label='Cleaned')
>>> ax.set_xlim(10, 1400)
>>> ax.set_ylim(1e-24, 1e-20)
>>> ax.legend()
>>> plot.show()
Notes
-----
`StateVector` data are not available in ``txt.gz`` format.
"""
from .io.losc import fetch_losc_data
return fetch_losc_data(ifo, start, end, sample_rate=sample_rate,
tag=tag, version=version, format=format,
verbose=verbose, cache=cache,
host=host, cls=cls, **kwargs)
|
def function[fetch_open_data, parameter[cls, ifo, start, end, sample_rate, tag, version, format, host, verbose, cache]]:
constant[Fetch open-access data from the LIGO Open Science Center
Parameters
----------
ifo : `str`
the two-character prefix of the IFO in which you are interested,
e.g. `'L1'`
start : `~gwpy.time.LIGOTimeGPS`, `float`, `str`, optional
GPS start time of required data, defaults to start of data found;
any input parseable by `~gwpy.time.to_gps` is fine
end : `~gwpy.time.LIGOTimeGPS`, `float`, `str`, optional
GPS end time of required data, defaults to end of data found;
any input parseable by `~gwpy.time.to_gps` is fine
sample_rate : `float`, optional,
the sample rate of desired data; most data are stored
by LOSC at 4096 Hz, however there may be event-related
data releases with a 16384 Hz rate, default: `4096`
tag : `str`, optional
file tag, e.g. ``'CLN'`` to select cleaned data, or ``'C00'``
for 'raw' calibrated data.
version : `int`, optional
version of files to download, defaults to highest discovered
version
format : `str`, optional
the data format to download and parse, default: ``'h5py'``
- ``'hdf5'``
- ``'gwf'`` - requires |LDAStools.frameCPP|_
host : `str`, optional
HTTP host name of LOSC server to access
verbose : `bool`, optional, default: `False`
print verbose output while fetching data
cache : `bool`, optional
save/read a local copy of the remote URL, default: `False`;
useful if the same remote data are to be accessed multiple times.
Set `GWPY_CACHE=1` in the environment to auto-cache.
**kwargs
any other keyword arguments are passed to the `TimeSeries.read`
method that parses the file that was downloaded
Examples
--------
>>> from gwpy.timeseries import (TimeSeries, StateVector)
>>> print(TimeSeries.fetch_open_data('H1', 1126259446, 1126259478))
TimeSeries([ 2.17704028e-19, 2.08763900e-19, 2.39681183e-19,
..., 3.55365541e-20, 6.33533516e-20,
7.58121195e-20]
unit: Unit(dimensionless),
t0: 1126259446.0 s,
dt: 0.000244140625 s,
name: Strain,
channel: None)
>>> print(StateVector.fetch_open_data('H1', 1126259446, 1126259478))
StateVector([127,127,127,127,127,127,127,127,127,127,127,127,
127,127,127,127,127,127,127,127,127,127,127,127,
127,127,127,127,127,127,127,127]
unit: Unit(dimensionless),
t0: 1126259446.0 s,
dt: 1.0 s,
name: Data quality,
channel: None,
bits: Bits(0: data present
1: passes cbc CAT1 test
2: passes cbc CAT2 test
3: passes cbc CAT3 test
4: passes burst CAT1 test
5: passes burst CAT2 test
6: passes burst CAT3 test,
channel=None,
epoch=1126259446.0))
For the `StateVector`, the naming of the bits will be
``format``-dependent, because they are recorded differently by LOSC
in different formats.
For events published in O2 and later, LOSC typically provides
multiple data sets containing the original (``'C00'``) and cleaned
(``'CLN'``) data.
To select both data sets and plot a comparison, for example:
>>> orig = TimeSeries.fetch_open_data('H1', 1187008870, 1187008896,
... tag='C00')
>>> cln = TimeSeries.fetch_open_data('H1', 1187008870, 1187008896,
... tag='CLN')
>>> origasd = orig.asd(fftlength=4, overlap=2)
>>> clnasd = cln.asd(fftlength=4, overlap=2)
>>> plot = origasd.plot(label='Un-cleaned')
>>> ax = plot.gca()
>>> ax.plot(clnasd, label='Cleaned')
>>> ax.set_xlim(10, 1400)
>>> ax.set_ylim(1e-24, 1e-20)
>>> ax.legend()
>>> plot.show()
Notes
-----
`StateVector` data are not available in ``txt.gz`` format.
]
from relative_module[io.losc] import module[fetch_losc_data]
return[call[name[fetch_losc_data], parameter[name[ifo], name[start], name[end]]]]
|
keyword[def] identifier[fetch_open_data] ( identifier[cls] , identifier[ifo] , identifier[start] , identifier[end] , identifier[sample_rate] = literal[int] ,
identifier[tag] = keyword[None] , identifier[version] = keyword[None] ,
identifier[format] = literal[string] , identifier[host] = identifier[GWOSC_DEFAULT_HOST] ,
identifier[verbose] = keyword[False] , identifier[cache] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[from] . identifier[io] . identifier[losc] keyword[import] identifier[fetch_losc_data]
keyword[return] identifier[fetch_losc_data] ( identifier[ifo] , identifier[start] , identifier[end] , identifier[sample_rate] = identifier[sample_rate] ,
identifier[tag] = identifier[tag] , identifier[version] = identifier[version] , identifier[format] = identifier[format] ,
identifier[verbose] = identifier[verbose] , identifier[cache] = identifier[cache] ,
identifier[host] = identifier[host] , identifier[cls] = identifier[cls] ,** identifier[kwargs] )
|
def fetch_open_data(cls, ifo, start, end, sample_rate=4096, tag=None, version=None, format='hdf5', host=GWOSC_DEFAULT_HOST, verbose=False, cache=None, **kwargs):
    """Fetch open-access data from the LIGO Open Science Center
    Parameters
    ----------
    ifo : `str`
        the two-character prefix of the IFO in which you are interested,
        e.g. `'L1'`
    start : `~gwpy.time.LIGOTimeGPS`, `float`, `str`, optional
        GPS start time of required data, defaults to start of data found;
        any input parseable by `~gwpy.time.to_gps` is fine
    end : `~gwpy.time.LIGOTimeGPS`, `float`, `str`, optional
        GPS end time of required data, defaults to end of data found;
        any input parseable by `~gwpy.time.to_gps` is fine
    sample_rate : `float`, optional,
        the sample rate of desired data; most data are stored
        by LOSC at 4096 Hz, however there may be event-related
        data releases with a 16384 Hz rate, default: `4096`
    tag : `str`, optional
        file tag, e.g. ``'CLN'`` to select cleaned data, or ``'C00'``
        for 'raw' calibrated data.
    version : `int`, optional
        version of files to download, defaults to highest discovered
        version
    format : `str`, optional
        the data format to download and parse, default: ``'hdf5'``
        - ``'hdf5'``
        - ``'gwf'`` - requires |LDAStools.frameCPP|_
    host : `str`, optional
        HTTP host name of LOSC server to access
    verbose : `bool`, optional, default: `False`
        print verbose output while fetching data
    cache : `bool`, optional
        save/read a local copy of the remote URL, default: `False`;
        useful if the same remote data are to be accessed multiple times.
        Set `GWPY_CACHE=1` in the environment to auto-cache.
    **kwargs
        any other keyword arguments are passed to the `TimeSeries.read`
        method that parses the file that was downloaded
    Examples
    --------
    >>> from gwpy.timeseries import (TimeSeries, StateVector)
    >>> print(TimeSeries.fetch_open_data('H1', 1126259446, 1126259478))
    TimeSeries([ 2.17704028e-19, 2.08763900e-19, 2.39681183e-19,
                ..., 3.55365541e-20, 6.33533516e-20,
                7.58121195e-20]
               unit: Unit(dimensionless),
               t0: 1126259446.0 s,
               dt: 0.000244140625 s,
               name: Strain,
               channel: None)
    >>> print(StateVector.fetch_open_data('H1', 1126259446, 1126259478))
    StateVector([127,127,127,127,127,127,127,127,127,127,127,127,
                 127,127,127,127,127,127,127,127,127,127,127,127,
                 127,127,127,127,127,127,127,127]
                unit: Unit(dimensionless),
                t0: 1126259446.0 s,
                dt: 1.0 s,
                name: Data quality,
                channel: None,
                bits: Bits(0: data present
                           1: passes cbc CAT1 test
                           2: passes cbc CAT2 test
                           3: passes cbc CAT3 test
                           4: passes burst CAT1 test
                           5: passes burst CAT2 test
                           6: passes burst CAT3 test,
                           channel=None,
                           epoch=1126259446.0))
    For the `StateVector`, the naming of the bits will be
    ``format``-dependent, because they are recorded differently by LOSC
    in different formats.
    For events published in O2 and later, LOSC typically provides
    multiple data sets containing the original (``'C00'``) and cleaned
    (``'CLN'``) data.
    To select both data sets and plot a comparison, for example:
    >>> orig = TimeSeries.fetch_open_data('H1', 1187008870, 1187008896,
    ...                                   tag='C00')
    >>> cln = TimeSeries.fetch_open_data('H1', 1187008870, 1187008896,
    ...                                  tag='CLN')
    >>> origasd = orig.asd(fftlength=4, overlap=2)
    >>> clnasd = cln.asd(fftlength=4, overlap=2)
    >>> plot = origasd.plot(label='Un-cleaned')
    >>> ax = plot.gca()
    >>> ax.plot(clnasd, label='Cleaned')
    >>> ax.set_xlim(10, 1400)
    >>> ax.set_ylim(1e-24, 1e-20)
    >>> ax.legend()
    >>> plot.show()
    Notes
    -----
    `StateVector` data are not available in ``txt.gz`` format.
    """
    # Delegate all the download/parse work to the LOSC I/O helper.
    from .io.losc import fetch_losc_data
    return fetch_losc_data(ifo, start, end, sample_rate=sample_rate, tag=tag, version=version, format=format, verbose=verbose, cache=cache, host=host, cls=cls, **kwargs)
|
def train(train_dir=None, train_csv=None, epochs=30, batch_size=32):
    """Run training of the sound-classification model.

    :param train_dir: directory containing the training ``.wav`` files
    :param train_csv: CSV file mapping audio file names to labels
    :param epochs: number of training epochs to run
    :param batch_size: mini-batch size used by both the data loader and the
        optimizer step
    :return: ``None``; trained parameters are saved to ``./net.params``
    """
    if not train_dir or not os.path.exists(train_dir) or not train_csv:
        warnings.warn("No train directory could be found ")
        return
    # Make a dataset from the local folder containing Audio data
    print("\nMaking an Audio Dataset...\n")
    tick = time.time()
    aud_dataset = AudioFolderDataset(train_dir, train_csv=train_csv, file_format='.wav', skip_header=True)
    tock = time.time()
    print("Loading the dataset took ", (tock-tick), " seconds.")
    print("\n=======================================\n")
    print("Number of output classes = ", len(aud_dataset.synsets))
    print("\nThe labels are : \n")
    print(aud_dataset.synsets)
    # Get the model to train
    net = model.get_net(len(aud_dataset.synsets))
    print("\nNeural Network = \n")
    print(net)
    print("\nModel - Neural Network Generated!\n")
    print("=======================================\n")
    # Define the loss - Softmax CE Loss
    softmax_loss = gluon.loss.SoftmaxCELoss(from_logits=False, sparse_label=True)
    print("Loss function initialized!\n")
    print("=======================================\n")
    # Define the trainer with the optimizer
    trainer = gluon.Trainer(net.collect_params(), 'adadelta')
    print("Optimizer - Trainer function initialized!\n")
    print("=======================================\n")
    print("Loading the dataset to the Gluon's OOTB Dataloader...")
    # Getting the data loader out of the AudioDataset and passing the transform
    from transforms import MFCC
    aud_transform = MFCC()
    tick = time.time()
    # Bug fix: honour the `batch_size` argument -- it was hard-coded to 32,
    # silently ignoring the caller's setting.
    audio_train_loader = gluon.data.DataLoader(aud_dataset.transform_first(aud_transform), batch_size=batch_size, shuffle=True)
    tock = time.time()
    print("Time taken to load data and apply transform here is ", (tock-tick), " seconds.")
    print("=======================================\n")
    print("Starting the training....\n")
    # Training loop
    tick = time.time()
    num_examples = len(aud_dataset)
    for epoch in range(epochs):
        cumulative_loss = 0
        for data, label in audio_train_loader:
            with autograd.record():
                output = net(data)
                loss = softmax_loss(output, label)
            loss.backward()
            # Normalise the gradient by the batch size on each step.
            trainer.step(batch_size)
            cumulative_loss += mx.nd.sum(loss).asscalar()
        if epoch % 5 == 0:
            # Periodic progress report every 5 epochs.
            train_accuracy = evaluate_accuracy(audio_train_loader, net)
            print("Epoch {}. Loss: {} Train accuracy : {} ".format(epoch, cumulative_loss/num_examples, train_accuracy))
            print("\n------------------------------\n")
    train_accuracy = evaluate_accuracy(audio_train_loader, net)
    tock = time.time()
    print("\nFinal training accuracy: ", train_accuracy)
    print("Training the sound classification for ", epochs, " epochs, MLP model took ", (tock-tick), " seconds")
    print("====================== END ======================\n")
    print("Trying to save the model parameters here...")
    net.save_parameters("./net.params")
    print("Saved the model parameters in current directory.")
|
def function[train, parameter[train_dir, train_csv, epochs, batch_size]]:
constant[Function responsible for running the training the model.]
if <ast.BoolOp object at 0x7da2054a7d60> begin[:]
call[name[warnings].warn, parameter[constant[No train directory could be found ]]]
return[None]
call[name[print], parameter[constant[
Making an Audio Dataset...
]]]
variable[tick] assign[=] call[name[time].time, parameter[]]
variable[aud_dataset] assign[=] call[name[AudioFolderDataset], parameter[name[train_dir]]]
variable[tock] assign[=] call[name[time].time, parameter[]]
call[name[print], parameter[constant[Loading the dataset took ], binary_operation[name[tock] - name[tick]], constant[ seconds.]]]
call[name[print], parameter[constant[
=======================================
]]]
call[name[print], parameter[constant[Number of output classes = ], call[name[len], parameter[name[aud_dataset].synsets]]]]
call[name[print], parameter[constant[
The labels are :
]]]
call[name[print], parameter[name[aud_dataset].synsets]]
variable[net] assign[=] call[name[model].get_net, parameter[call[name[len], parameter[name[aud_dataset].synsets]]]]
call[name[print], parameter[constant[
Neural Network =
]]]
call[name[print], parameter[name[net]]]
call[name[print], parameter[constant[
Model - Neural Network Generated!
]]]
call[name[print], parameter[constant[=======================================
]]]
variable[softmax_loss] assign[=] call[name[gluon].loss.SoftmaxCELoss, parameter[]]
call[name[print], parameter[constant[Loss function initialized!
]]]
call[name[print], parameter[constant[=======================================
]]]
variable[trainer] assign[=] call[name[gluon].Trainer, parameter[call[name[net].collect_params, parameter[]], constant[adadelta]]]
call[name[print], parameter[constant[Optimizer - Trainer function initialized!
]]]
call[name[print], parameter[constant[=======================================
]]]
call[name[print], parameter[constant[Loading the dataset to the Gluon's OOTB Dataloader...]]]
from relative_module[transforms] import module[MFCC]
variable[aud_transform] assign[=] call[name[MFCC], parameter[]]
variable[tick] assign[=] call[name[time].time, parameter[]]
variable[audio_train_loader] assign[=] call[name[gluon].data.DataLoader, parameter[call[name[aud_dataset].transform_first, parameter[name[aud_transform]]]]]
variable[tock] assign[=] call[name[time].time, parameter[]]
call[name[print], parameter[constant[Time taken to load data and apply transform here is ], binary_operation[name[tock] - name[tick]], constant[ seconds.]]]
call[name[print], parameter[constant[=======================================
]]]
call[name[print], parameter[constant[Starting the training....
]]]
variable[tick] assign[=] call[name[time].time, parameter[]]
variable[batch_size] assign[=] name[batch_size]
variable[num_examples] assign[=] call[name[len], parameter[name[aud_dataset]]]
for taget[name[epoch]] in starred[call[name[range], parameter[name[epochs]]]] begin[:]
variable[cumulative_loss] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b204c5b0>, <ast.Name object at 0x7da1b204fb50>]]] in starred[name[audio_train_loader]] begin[:]
with call[name[autograd].record, parameter[]] begin[:]
variable[output] assign[=] call[name[net], parameter[name[data]]]
variable[loss] assign[=] call[name[softmax_loss], parameter[name[output], name[label]]]
call[name[loss].backward, parameter[]]
call[name[trainer].step, parameter[name[batch_size]]]
<ast.AugAssign object at 0x7da1b204d720>
if compare[binary_operation[name[epoch] <ast.Mod object at 0x7da2590d6920> constant[5]] equal[==] constant[0]] begin[:]
variable[train_accuracy] assign[=] call[name[evaluate_accuracy], parameter[name[audio_train_loader], name[net]]]
call[name[print], parameter[call[constant[Epoch {}. Loss: {} Train accuracy : {} ].format, parameter[name[epoch], binary_operation[name[cumulative_loss] / name[num_examples]], name[train_accuracy]]]]]
call[name[print], parameter[constant[
------------------------------
]]]
variable[train_accuracy] assign[=] call[name[evaluate_accuracy], parameter[name[audio_train_loader], name[net]]]
variable[tock] assign[=] call[name[time].time, parameter[]]
call[name[print], parameter[constant[
Final training accuracy: ], name[train_accuracy]]]
call[name[print], parameter[constant[Training the sound classification for ], name[epochs], constant[ epochs, MLP model took ], binary_operation[name[tock] - name[tick]], constant[ seconds]]]
call[name[print], parameter[constant[====================== END ======================
]]]
call[name[print], parameter[constant[Trying to save the model parameters here...]]]
call[name[net].save_parameters, parameter[constant[./net.params]]]
call[name[print], parameter[constant[Saved the model parameters in current directory.]]]
|
keyword[def] identifier[train] ( identifier[train_dir] = keyword[None] , identifier[train_csv] = keyword[None] , identifier[epochs] = literal[int] , identifier[batch_size] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[train_dir] keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[train_dir] ) keyword[or] keyword[not] identifier[train_csv] :
identifier[warnings] . identifier[warn] ( literal[string] )
keyword[return]
identifier[print] ( literal[string] )
identifier[tick] = identifier[time] . identifier[time] ()
identifier[aud_dataset] = identifier[AudioFolderDataset] ( identifier[train_dir] , identifier[train_csv] = identifier[train_csv] , identifier[file_format] = literal[string] , identifier[skip_header] = keyword[True] )
identifier[tock] = identifier[time] . identifier[time] ()
identifier[print] ( literal[string] ,( identifier[tock] - identifier[tick] ), literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] , identifier[len] ( identifier[aud_dataset] . identifier[synsets] ))
identifier[print] ( literal[string] )
identifier[print] ( identifier[aud_dataset] . identifier[synsets] )
identifier[net] = identifier[model] . identifier[get_net] ( identifier[len] ( identifier[aud_dataset] . identifier[synsets] ))
identifier[print] ( literal[string] )
identifier[print] ( identifier[net] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[softmax_loss] = identifier[gluon] . identifier[loss] . identifier[SoftmaxCELoss] ( identifier[from_logits] = keyword[False] , identifier[sparse_label] = keyword[True] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[trainer] = identifier[gluon] . identifier[Trainer] ( identifier[net] . identifier[collect_params] (), literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
keyword[from] identifier[transforms] keyword[import] identifier[MFCC]
identifier[aud_transform] = identifier[MFCC] ()
identifier[tick] = identifier[time] . identifier[time] ()
identifier[audio_train_loader] = identifier[gluon] . identifier[data] . identifier[DataLoader] ( identifier[aud_dataset] . identifier[transform_first] ( identifier[aud_transform] ), identifier[batch_size] = literal[int] , identifier[shuffle] = keyword[True] )
identifier[tock] = identifier[time] . identifier[time] ()
identifier[print] ( literal[string] ,( identifier[tock] - identifier[tick] ), literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[tick] = identifier[time] . identifier[time] ()
identifier[batch_size] = identifier[batch_size]
identifier[num_examples] = identifier[len] ( identifier[aud_dataset] )
keyword[for] identifier[epoch] keyword[in] identifier[range] ( identifier[epochs] ):
identifier[cumulative_loss] = literal[int]
keyword[for] identifier[data] , identifier[label] keyword[in] identifier[audio_train_loader] :
keyword[with] identifier[autograd] . identifier[record] ():
identifier[output] = identifier[net] ( identifier[data] )
identifier[loss] = identifier[softmax_loss] ( identifier[output] , identifier[label] )
identifier[loss] . identifier[backward] ()
identifier[trainer] . identifier[step] ( identifier[batch_size] )
identifier[cumulative_loss] += identifier[mx] . identifier[nd] . identifier[sum] ( identifier[loss] ). identifier[asscalar] ()
keyword[if] identifier[epoch] % literal[int] == literal[int] :
identifier[train_accuracy] = identifier[evaluate_accuracy] ( identifier[audio_train_loader] , identifier[net] )
identifier[print] ( literal[string] . identifier[format] ( identifier[epoch] , identifier[cumulative_loss] / identifier[num_examples] , identifier[train_accuracy] ))
identifier[print] ( literal[string] )
identifier[train_accuracy] = identifier[evaluate_accuracy] ( identifier[audio_train_loader] , identifier[net] )
identifier[tock] = identifier[time] . identifier[time] ()
identifier[print] ( literal[string] , identifier[train_accuracy] )
identifier[print] ( literal[string] , identifier[epochs] , literal[string] ,( identifier[tock] - identifier[tick] ), literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[net] . identifier[save_parameters] ( literal[string] )
identifier[print] ( literal[string] )
|
def train(train_dir=None, train_csv=None, epochs=30, batch_size=32):
"""Function responsible for running the training the model."""
if not train_dir or not os.path.exists(train_dir) or (not train_csv):
warnings.warn('No train directory could be found ')
return # depends on [control=['if'], data=[]]
# Make a dataset from the local folder containing Audio data
print('\nMaking an Audio Dataset...\n')
tick = time.time()
aud_dataset = AudioFolderDataset(train_dir, train_csv=train_csv, file_format='.wav', skip_header=True)
tock = time.time()
print('Loading the dataset took ', tock - tick, ' seconds.')
print('\n=======================================\n')
print('Number of output classes = ', len(aud_dataset.synsets))
print('\nThe labels are : \n')
print(aud_dataset.synsets)
# Get the model to train
net = model.get_net(len(aud_dataset.synsets))
print('\nNeural Network = \n')
print(net)
print('\nModel - Neural Network Generated!\n')
print('=======================================\n')
#Define the loss - Softmax CE Loss
softmax_loss = gluon.loss.SoftmaxCELoss(from_logits=False, sparse_label=True)
print('Loss function initialized!\n')
print('=======================================\n')
#Define the trainer with the optimizer
trainer = gluon.Trainer(net.collect_params(), 'adadelta')
print('Optimizer - Trainer function initialized!\n')
print('=======================================\n')
print("Loading the dataset to the Gluon's OOTB Dataloader...")
#Getting the data loader out of the AudioDataset and passing the transform
from transforms import MFCC
aud_transform = MFCC()
tick = time.time()
audio_train_loader = gluon.data.DataLoader(aud_dataset.transform_first(aud_transform), batch_size=32, shuffle=True)
tock = time.time()
print('Time taken to load data and apply transform here is ', tock - tick, ' seconds.')
print('=======================================\n')
print('Starting the training....\n')
# Training loop
tick = time.time()
batch_size = batch_size
num_examples = len(aud_dataset)
for epoch in range(epochs):
cumulative_loss = 0
for (data, label) in audio_train_loader:
with autograd.record():
output = net(data)
loss = softmax_loss(output, label) # depends on [control=['with'], data=[]]
loss.backward()
trainer.step(batch_size)
cumulative_loss += mx.nd.sum(loss).asscalar() # depends on [control=['for'], data=[]]
if epoch % 5 == 0:
train_accuracy = evaluate_accuracy(audio_train_loader, net)
print('Epoch {}. Loss: {} Train accuracy : {} '.format(epoch, cumulative_loss / num_examples, train_accuracy))
print('\n------------------------------\n') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['epoch']]
train_accuracy = evaluate_accuracy(audio_train_loader, net)
tock = time.time()
print('\nFinal training accuracy: ', train_accuracy)
print('Training the sound classification for ', epochs, ' epochs, MLP model took ', tock - tick, ' seconds')
print('====================== END ======================\n')
print('Trying to save the model parameters here...')
net.save_parameters('./net.params')
print('Saved the model parameters in current directory.')
|
def is_date(value, minimum=None, maximum=None, coerce_value=False, **kwargs):
    """Indicate whether ``value`` is a :class:`date <python:datetime.date>`.

    :param value: The value to evaluate.
    :param minimum: If supplied, ``value`` must fall on or after this value.
    :type minimum: :class:`datetime <python:datetime.datetime>` /
      :class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
      / :obj:`None <python:None>`
    :param maximum: If supplied, ``value`` must fall on or before this value.
    :type maximum: :class:`datetime <python:datetime.datetime>` /
      :class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
      / :obj:`None <python:None>`
    :param coerce_value: If ``True``, return ``True`` whenever ``value`` can be
      coerced to a :class:`date <python:datetime.date>`; if ``False``, only
      return ``True`` when ``value`` is already a date value. Defaults to
      ``False``.
    :type coerce_value: :class:`bool <python:bool>`
    :returns: ``True`` if ``value`` is valid, ``False`` if it is not.
    :rtype: :class:`bool <python:bool>`
    :raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or
      duplicates keyword parameters passed to the underlying validator
    """
    try:
        # Delegate the real validation work to the ``date`` validator; any
        # failure other than a SyntaxError simply means "not a valid date".
        validators.date(value,
                        minimum=minimum,
                        maximum=maximum,
                        coerce_value=coerce_value,
                        **kwargs)
    except SyntaxError:
        # Duplicate keyword parameters are a caller bug -- propagate them.
        raise
    except Exception:
        return False
    return True
|
def function[is_date, parameter[value, minimum, maximum, coerce_value]]:
constant[Indicate whether ``value`` is a :class:`date <python:datetime.date>`.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is on or after
this value.
:type minimum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
/ :obj:`None <python:None>`
:param maximum: If supplied, will make sure that ``value`` is on or before this
value.
:type maximum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
/ :obj:`None <python:None>`
:param coerce_value: If ``True``, will return ``True`` if ``value`` can be
coerced to a :class:`date <python:datetime.date>`. If ``False``,
will only return ``True`` if ``value`` is a date value only. Defaults to
``False``.
:type coerce_value: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
]
<ast.Try object at 0x7da1b07d2fb0>
return[constant[True]]
|
keyword[def] identifier[is_date] ( identifier[value] ,
identifier[minimum] = keyword[None] ,
identifier[maximum] = keyword[None] ,
identifier[coerce_value] = keyword[False] ,
** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[value] = identifier[validators] . identifier[date] ( identifier[value] ,
identifier[minimum] = identifier[minimum] ,
identifier[maximum] = identifier[maximum] ,
identifier[coerce_value] = identifier[coerce_value] ,
** identifier[kwargs] )
keyword[except] identifier[SyntaxError] keyword[as] identifier[error] :
keyword[raise] identifier[error]
keyword[except] identifier[Exception] :
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def is_date(value, minimum=None, maximum=None, coerce_value=False, **kwargs):
"""Indicate whether ``value`` is a :class:`date <python:datetime.date>`.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is on or after
this value.
:type minimum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
/ :obj:`None <python:None>`
:param maximum: If supplied, will make sure that ``value`` is on or before this
value.
:type maximum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
/ :obj:`None <python:None>`
:param coerce_value: If ``True``, will return ``True`` if ``value`` can be
coerced to a :class:`date <python:datetime.date>`. If ``False``,
will only return ``True`` if ``value`` is a date value only. Defaults to
``False``.
:type coerce_value: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.date(value, minimum=minimum, maximum=maximum, coerce_value=coerce_value, **kwargs) # depends on [control=['try'], data=[]]
except SyntaxError as error:
raise error # depends on [control=['except'], data=['error']]
except Exception:
return False # depends on [control=['except'], data=[]]
return True
|
def flatten_fieldsets(fieldsets):
    """Return a flat list of field names from an admin fieldsets structure."""
    names = []
    for _unused_label, options in (fieldsets or ()):
        # A nested 'fieldsets' entry takes precedence and is flattened
        # recursively; otherwise collect the plain 'fields' entries.
        if 'fieldsets' in options:
            names.extend(flatten_fieldsets(options.get('fieldsets')))
            continue
        for entry in options.get('fields', ()):
            if isinstance(entry, (list, tuple)):
                # Grouped fields (rendered on one line) contribute each member.
                names.extend(entry)
            else:
                names.append(entry)
    return names
|
def function[flatten_fieldsets, parameter[fieldsets]]:
constant[Returns a list of field names from an admin fieldsets structure.]
variable[field_names] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da204621840>, <ast.Name object at 0x7da204623340>]]] in starred[<ast.BoolOp object at 0x7da204620e80>] begin[:]
if compare[constant[fieldsets] in name[opts]] begin[:]
<ast.AugAssign object at 0x7da204622dd0>
return[name[field_names]]
|
keyword[def] identifier[flatten_fieldsets] ( identifier[fieldsets] ):
literal[string]
identifier[field_names] =[]
keyword[for] identifier[_] , identifier[opts] keyword[in] identifier[fieldsets] keyword[or] ():
keyword[if] literal[string] keyword[in] identifier[opts] :
identifier[field_names] += identifier[flatten_fieldsets] ( identifier[opts] . identifier[get] ( literal[string] ))
keyword[else] :
keyword[for] identifier[field] keyword[in] identifier[opts] . identifier[get] ( literal[string] ,()):
keyword[if] identifier[isinstance] ( identifier[field] ,( identifier[list] , identifier[tuple] )):
identifier[field_names] . identifier[extend] ( identifier[field] )
keyword[else] :
identifier[field_names] . identifier[append] ( identifier[field] )
keyword[return] identifier[field_names]
|
def flatten_fieldsets(fieldsets):
"""Returns a list of field names from an admin fieldsets structure."""
field_names = []
for (_, opts) in fieldsets or ():
if 'fieldsets' in opts:
field_names += flatten_fieldsets(opts.get('fieldsets')) # depends on [control=['if'], data=['opts']]
else:
for field in opts.get('fields', ()):
if isinstance(field, (list, tuple)):
field_names.extend(field) # depends on [control=['if'], data=[]]
else:
field_names.append(field) # depends on [control=['for'], data=['field']] # depends on [control=['for'], data=[]]
return field_names
|
def exit_on_error(self, message, exit_code=1):
    # pylint: disable=no-self-use
    """Print the error reason and terminate the process.

    :param message: message for the exit reason
    :type message: str
    :param exit_code: if not None, exit with the provided value as exit code
    :type exit_code: int
    :return: None
    """
    # NOTE(review): `log` is assembled but never handed to any logger in this
    # block; only the print() below is actually visible. Kept for parity with
    # the original implementation.
    parts = ["I got an unrecoverable error. I have to exit."]
    if message:
        parts.append("\n-----\nError message: %s" % message)
        print("Error message: %s" % message)
    parts.append("-----\n")
    parts.append("You can get help at https://github.com/Alignak-monitoring/alignak\n")
    parts.append("If you think this is a bug, create a new issue including as much "
                 "details as possible (version, configuration,...)")
    log = "".join(parts)
    if exit_code is not None:
        exit(exit_code)
|
def function[exit_on_error, parameter[self, message, exit_code]]:
constant[Log generic message when getting an error and exit
:param exit_code: if not None, exit with the provided value as exit code
:type exit_code: int
:param message: message for the exit reason
:type message: str
:return: None
]
variable[log] assign[=] constant[I got an unrecoverable error. I have to exit.]
if name[message] begin[:]
<ast.AugAssign object at 0x7da18fe937c0>
call[name[print], parameter[binary_operation[constant[Error message: %s] <ast.Mod object at 0x7da2590d6920> name[message]]]]
<ast.AugAssign object at 0x7da18fe900a0>
<ast.AugAssign object at 0x7da18fe908e0>
<ast.AugAssign object at 0x7da18fe93a90>
if compare[name[exit_code] is_not constant[None]] begin[:]
call[name[exit], parameter[name[exit_code]]]
|
keyword[def] identifier[exit_on_error] ( identifier[self] , identifier[message] , identifier[exit_code] = literal[int] ):
literal[string]
identifier[log] = literal[string]
keyword[if] identifier[message] :
identifier[log] += literal[string] % identifier[message]
identifier[print] ( literal[string] % identifier[message] )
identifier[log] += literal[string]
identifier[log] += literal[string]
identifier[log] += literal[string] literal[string]
keyword[if] identifier[exit_code] keyword[is] keyword[not] keyword[None] :
identifier[exit] ( identifier[exit_code] )
|
def exit_on_error(self, message, exit_code=1):
# pylint: disable=no-self-use
'Log generic message when getting an error and exit\n\n :param exit_code: if not None, exit with the provided value as exit code\n :type exit_code: int\n :param message: message for the exit reason\n :type message: str\n :return: None\n '
log = 'I got an unrecoverable error. I have to exit.'
if message:
log += '\n-----\nError message: %s' % message
print('Error message: %s' % message) # depends on [control=['if'], data=[]]
log += '-----\n'
log += 'You can get help at https://github.com/Alignak-monitoring/alignak\n'
log += 'If you think this is a bug, create a new issue including as much details as possible (version, configuration,...)'
if exit_code is not None:
exit(exit_code) # depends on [control=['if'], data=['exit_code']]
|
def main(command_line=True, **kwargs):
    """
    NAME
        _2g_bin_magic.py
    DESCRIPTION
        takes the binary 2g format magnetometer files and converts them to magic_measurements, er_samples.txt and er_sites.txt file
    SYNTAX
        2g_bin_magic.py [command line options]
    OPTIONS
        -f FILE: specify input 2g (binary) file
        -F FILE: specify magic_measurements output file, default is: magic_measurements.txt
        -Fsa FILE: specify output file, default is: er_samples.txt
        -Fsi FILE: specify output file, default is: er_sites.txt
        -ncn NCON:  specify naming convention: default is #2 below
        -ocn OCON:  specify orientation convention, default is #5 below
        -mcd: specify sampling method codes as a colon delimited string:  [default is: FS-FD:SO-POM]
             FS-FD field sampling done with a drill
             FS-H field sampling done with hand samples
             FS-LOC-GPS  field location done with GPS
             FS-LOC-MAP  field location done with map
             SO-POM   a Pomeroy orientation device was used
             SO-ASC   an ASC orientation device was used
             SO-MAG   orientation with magnetic compass
             SO-SUN   orientation with sun compass
        -loc: location name, default="unknown"
        -spc NUM : specify number of characters to designate a specimen, default = 0
        -ins INST : specify instsrument name
        -a: average replicate measurements
    INPUT FORMAT
        Input files are horrible mag binary format (who knows why?)
        Orientation convention:
            [1] Lab arrow azimuth= mag_azimuth; Lab arrow dip=-field_dip
                i.e., field_dip is degrees from vertical down - the hade [default]
            [2] Lab arrow azimuth = mag_azimuth-90; Lab arrow dip = -field_dip
                i.e., mag_azimuth is strike and field_dip is hade
            [3] Lab arrow azimuth = mag_azimuth; Lab arrow dip = 90-field_dip
                i.e.,  lab arrow same as field arrow, but field_dip was a hade.
            [4] lab azimuth and dip are same as mag_azimuth, field_dip
            [5] lab azimuth is same as mag_azimuth,lab arrow dip=field_dip-90
            [6] Lab arrow azimuth = mag_azimuth-90; Lab arrow dip = 90-field_dip
            [7] all others you will have to either customize your
                self or e-mail ltauxe@ucsd.edu for help.
        Magnetic declination convention:
            Az will use supplied declination to correct azimuth
        Sample naming convention:
            [1] XXXXY: where XXXX is an arbitrary length site designation and Y
                is the single character sample designation.  e.g., TG001a is the
                first sample from site TG001.    [default]
            [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitary length)
            [3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitary length)
            [4-Z] XXXX[YYY]:  YYY is sample designation with Z characters from site XXX
            [5] site name = sample name
            [6] site name entered in site_name column in the orient.txt format input file  -- NOT CURRENTLY SUPPORTED
            [7-Z] [XXX]YYY:  XXX is site designation with Z characters from samples  XXXYYY
            NB: all others you will have to either customize your
                self or e-mail ltauxe@ucsd.edu for help.
    OUTPUT
            output saved in magic_measurements.txt & er_samples.txt formatted files
              will overwrite any existing files
    """
    #
    # initialize variables
    #
    mag_file = ''
    specnum = 0
    ub_file, samp_file, or_con, corr, meas_file = "", "er_samples.txt", "3", "1", "magic_measurements.txt"
    pos_file, site_file = "", "er_sites.txt"
    noave = 1
    args = sys.argv
    bed_dip, bed_dip_dir = "", ""
    samp_con, Z, average_bedding = "2", 1, "0"
    meths = 'FS-FD'
    sclass, lithology, _type = "", "", ""
    user, inst = "", ""
    DecCorr = 0.
    location_name = "unknown"
    months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
              'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
    gmeths = ""
    #
    # parse command line arguments (only when invoked as a script)
    #
    dir_path = '.'
    if command_line:
        if '-WD' in args:
            ind = args.index("-WD")
            dir_path = sys.argv[ind + 1]
        if "-h" in args:
            print(main.__doc__)
            return False
        if "-f" in args:
            ind = args.index("-f")
            mag_file = sys.argv[ind + 1]
        if "-fpos" in args:
            ind = args.index("-fpos")
            pos_file = sys.argv[ind + 1]
        if "-F" in args:
            ind = args.index("-F")
            meas_file = sys.argv[ind + 1]
        if "-Fsa" in args:
            ind = args.index("-Fsa")
            samp_file = sys.argv[ind + 1]
        if "-Fsi" in args:
            ind = args.index("-Fsi")
            site_file = sys.argv[ind + 1]
        if "-ocn" in args:
            ind = args.index("-ocn")
            or_con = sys.argv[ind + 1]
        if "-ncn" in args:
            ind = args.index("-ncn")
            samp_con = sys.argv[ind + 1]
        if "-mcd" in args:
            ind = args.index("-mcd")
            gmeths = (sys.argv[ind + 1])
        if "-loc" in args:
            ind = args.index("-loc")
            location_name = (sys.argv[ind + 1])
        if "-spc" in args:
            ind = args.index("-spc")
            specnum = int(args[ind + 1])
        if "-ins" in args:
            ind = args.index("-ins")
            inst = args[ind + 1]
        if "-a" in args:
            noave = 0
        #
        ID = False
        if '-ID' in args:
            ind = args.index('-ID')
            ID = args[ind + 1]
    #
    # or take the same options from **kwargs (programmatic invocation)
    #
    if not command_line:
        dir_path = kwargs.get('dir_path', '.')
        mag_file = kwargs.get('mag_file', '')
        pos_file = kwargs.get('pos_file', '')
        meas_file = kwargs.get('meas_file', 'magic_measurements.txt')
        samp_file = kwargs.get('samp_file', 'er_samples.txt')
        site_file = kwargs.get('site_file', 'er_sites.txt')
        or_con = kwargs.get('or_con', '3')
        samp_con = kwargs.get('samp_con', '2')
        corr = kwargs.get('corr', '1')
        gmeths = kwargs.get('gmeths', '')
        location_name = kwargs.get('location_name', '')
        specnum = int(kwargs.get('specnum', 0))
        inst = kwargs.get('inst', '')
        noave = kwargs.get('noave', 1)  # default is DO average
        ID = kwargs.get('ID', '')

    # format and fix variables acquired from command line args or input with
    # **kwargs
    if specnum != 0:
        specnum = -specnum
    if ID:
        input_dir_path = ID
    else:
        input_dir_path = dir_path

    if samp_con:
        # conventions 4 and 7 carry the character count as "4-Z" / "7-Z"
        if "4" in samp_con:
            if "-" not in samp_con:
                print("option [4] must be in form 4-Z where Z is an integer")
                return False, "option [4] must be in form 4-Z where Z is an integer"
            else:
                Z = samp_con.split("-")[1]
                samp_con = "4"
        if "7" in samp_con:
            if "-" not in samp_con:
                print("option [7] must be in form 7-Z where Z is an integer")
                return False, "option [7] must be in form 7-Z where Z is an integer"
            else:
                Z = samp_con.split("-")[1]
                samp_con = "7"
        if "6" in samp_con:
            # BUG FIX: the original carried an unreachable er_samples.txt
            # fallback after this return; that dead code has been removed.
            print('Naming convention option [6] not currently supported')
            return False, 'Naming convention option [6] not currently supported'

    if not mag_file:
        print("mag file is required input")
        return False, "mag file is required input"
    output_dir_path = dir_path
    mag_file = os.path.join(input_dir_path, mag_file)
    samp_file = output_dir_path + '/' + samp_file
    site_file = output_dir_path + '/' + site_file
    meas_file = output_dir_path + '/' + meas_file

    # seed the sample list with any pre-existing er_samples file
    samplist = []
    try:
        Samps, file_type = pmag.magic_read(samp_file)
        for samp in Samps:
            if samp['er_sample_name'] not in samplist:
                samplist.append(samp['er_sample_name'])
    except Exception:
        Samps = []
    MagRecs = []
    try:
        # NOTE: the 2g file is binary and is round-tripped through str(),
        # so the delimiters below appear as literal escape text ('\\x00',
        # '\\xcd', ...) rather than control characters.
        with open(mag_file, 'br') as f:
            raw = str(f.read()).strip("b '")
    except Exception as ex:
        print('ex', ex)
        print("bad mag file")
        return False, "bad mag file"
    firstline, date = 1, ""
    d = raw.split('\\xcd')
    for line in d:
        rec = line.split('\\x00')
        if firstline == 1:
            # first record of the file: specimen header (name, volume,
            # orientation, bedding, optional declination correction)
            firstline = 0
            spec, vol = "", 1
            el = 51
            while line[el:el + 1] != "\\":
                spec = spec + line[el]
                el += 1
            # check for bad sample name
            test = spec.split('.')
            date = ""
            if len(test) > 1:
                spec = test[0]
                kk = 24
                while line[kk] != '\\x01' and line[kk] != '\\x00':
                    kk += 1
                vcc = line[24:kk]
                el = 10
                while rec[el].strip() != '':
                    el += 1
                date, comments = rec[el + 7], []
            else:
                el = 9
                while rec[el] != '\\x01':
                    el += 1
                vcc, date, comments = rec[el - 3], rec[el + 7], []
            specname = spec.lower()
            print('importing ', specname)
            el += 8
            while not rec[el].isdigit():
                comments.append(rec[el])
                el += 1
            while rec[el] == "":
                el += 1
            az = float(rec[el])
            el += 1
            while rec[el] == "":
                el += 1
            pl = float(rec[el])
            el += 1
            while rec[el] == "":
                el += 1
            bed_dip_dir = float(rec[el])
            el += 1
            while rec[el] == "":
                el += 1
            bed_dip = float(rec[el])
            el += 1
            while rec[el] == "":
                el += 1
            if rec[el] == '\\x01':
                bed_dip = 180. - bed_dip
            el += 1
            while rec[el] == "":
                el += 1
            fold_az = float(rec[el])
            el += 1
            while rec[el] == "":
                el += 1
            fold_pl = rec[el]
            el += 1
            while rec[el] == "":
                el += 1
            if rec[el] != "" and rec[el] != '\\x02' and rec[el] != '\\x01':
                # declination correction supplied: rotate all azimuths
                deccorr = float(rec[el])
                az += deccorr
                bed_dip_dir += deccorr
                fold_az += deccorr
                if bed_dip_dir >= 360:
                    bed_dip_dir = bed_dip_dir - 360.
                if az >= 360.:
                    az = az - 360.
                if fold_az >= 360.:
                    fold_az = fold_az - 360.
            else:
                deccorr = 0
            if specnum != 0:
                sample = specname[:specnum]
            else:
                sample = specname
            SampRec = {}
            SampRec["er_sample_name"] = sample
            SampRec["er_location_name"] = location_name
            SampRec["er_citation_names"] = "This study"
            # convert to labaz, labpl
            labaz, labdip = pmag.orient(az, pl, or_con)
            #
            # parse information common to all orientation methods
            #
            SampRec["sample_bed_dip"] = '%7.1f' % (bed_dip)
            SampRec["sample_bed_dip_direction"] = '%7.1f' % (bed_dip_dir)
            SampRec["sample_dip"] = '%7.1f' % (labdip)
            SampRec["sample_azimuth"] = '%7.1f' % (labaz)
            if vcc.strip() != "":
                vol = float(vcc) * 1e-6  # convert to m^3 from cc
            SampRec["sample_volume"] = '%10.3e' % (vol)
            SampRec["sample_class"] = sclass
            SampRec["sample_lithology"] = lithology
            SampRec["sample_type"] = _type
            SampRec["sample_declination_correction"] = '%7.1f' % (deccorr)
            methods = gmeths.split(':')
            # BUG FIX: was `deccorr != "0"` (float vs str, always True in
            # Python 3), so SO-MAG was replaced by SO-CMD-NORTH even when
            # no declination correction had been applied.
            if deccorr != 0:
                if 'SO-MAG' in methods:
                    methods.remove('SO-MAG')
                methods.append('SO-CMD-NORTH')
            meths = ""
            for meth in methods:
                meths = meths + meth + ":"
            meths = meths[:-1]
            SampRec["magic_method_codes"] = meths
            if int(samp_con) < 6 or int(samp_con) == 7:
                # parse out the site name
                site = pmag.parse_site(SampRec["er_sample_name"], samp_con, Z)
                SampRec["er_site_name"] = site
            elif len(Samps) > 1:
                # look the site/location up in the pre-existing sample records
                site, location = "", ""
                for samp in Samps:
                    if samp["er_sample_name"] == SampRec["er_sample_name"]:
                        site = samp["er_site_name"]
                        location = samp["er_location_name"]
                        break
                SampRec["er_location_name"] = samp["er_location_name"]
                SampRec["er_site_name"] = samp["er_site_name"]
            if sample not in samplist:
                samplist.append(sample)
                Samps.append(SampRec)
        else:
            # subsequent records: one measurement step each
            MagRec = {}
            MagRec["treatment_temp"] = '%8.3e' % (273)  # room temp in kelvin
            MagRec["measurement_temp"] = '%8.3e' % (273)  # room temp in kelvin
            MagRec["treatment_ac_field"] = '0'
            MagRec["treatment_dc_field"] = '0'
            MagRec["treatment_dc_field_phi"] = '0'
            MagRec["treatment_dc_field_theta"] = '0'
            meas_type = "LT-NO"
            MagRec["measurement_flag"] = 'g'
            MagRec["measurement_standard"] = 'u'
            MagRec["measurement_number"] = '1'
            MagRec["er_specimen_name"] = specname
            MagRec["er_sample_name"] = SampRec['er_sample_name']
            MagRec["er_site_name"] = SampRec['er_site_name']
            MagRec["er_location_name"] = location_name
            el, demag = 1, ''
            treat = rec[el]
            # treatment label ends in 'C' for thermal steps; anything else
            # other than 'NRM' is an AF step
            if treat[-1] == 'C':
                demag = 'T'
            elif treat != 'NRM':
                demag = 'AF'
            el += 1
            while rec[el] == "":
                el += 1
            MagRec["measurement_dec"] = rec[el]
            cdec = float(rec[el])
            el += 1
            while rec[el] == "":
                el += 1
            MagRec["measurement_inc"] = rec[el]
            cinc = float(rec[el])
            el += 1
            while rec[el] == "":
                el += 1
            gdec = rec[el]
            el += 1
            while rec[el] == "":
                el += 1
            ginc = rec[el]
            el = skip(2, el, rec)  # skip bdec,binc
#                el=skip(4,el,rec) # skip gdec,ginc,bdec,binc
#                print 'moment emu: ',rec[el]
            MagRec["measurement_magn_moment"] = '%10.3e' % (
                float(rec[el]) * 1e-3)  # moment in Am^2 (from emu)
            MagRec["measurement_magn_volume"] = '%10.3e' % (
                float(rec[el]) * 1e-3 / vol)  # magnetization in A/m
            el = skip(2, el, rec)  # skip to xsig
            MagRec["measurement_sd_x"] = '%10.3e' % (
                float(rec[el]) * 1e-3)  # convert from emu
            el = skip(3, el, rec)  # skip to ysig
            MagRec["measurement_sd_y"] = '%10.3e' % (
                float(rec[el]) * 1e-3)  # convert from emu
            el = skip(3, el, rec)  # skip to zsig
            MagRec["measurement_sd_z"] = '%10.3e' % (
                float(rec[el]) * 1e-3)  # convert from emu
            el += 1  # skip to positions
            MagRec["measurement_positions"] = rec[el]
#                el=skip(5,el,rec) # skip to date
#                mm=str(months.index(date[0]))
#                if len(mm)==1:
#                    mm='0'+str(mm)
#                else:
#                    mm=str(mm)
#                dstring=date[2]+':'+mm+':'+date[1]+":"+date[3]
#                MagRec['measurement_date']=dstring
            MagRec["magic_instrument_codes"] = inst
            MagRec["er_analyst_mail_names"] = ""
            MagRec["er_citation_names"] = "This study"
            MagRec["magic_method_codes"] = meas_type
            if demag == "AF":
                MagRec["treatment_ac_field"] = '%8.3e' % (
                    float(treat[:-2]) * 1e-3)  # peak field in tesla
                meas_type = "LT-AF-Z"
                MagRec["treatment_dc_field"] = '0'
            elif demag == "T":
                MagRec["treatment_temp"] = '%8.3e' % (
                    float(treat[:-1]) + 273.)  # temp in kelvin
                meas_type = "LT-T-Z"
            MagRec['magic_method_codes'] = meas_type
            MagRecs.append(MagRec)
    # write out measurements, samples and sites
    MagOuts = pmag.measurements_methods(MagRecs, noave)
    MagOuts, keylist = pmag.fillkeys(MagOuts)
    pmag.magic_write(meas_file, MagOuts, 'magic_measurements')
    print("Measurements put in ", meas_file)
    SampsOut, sampkeys = pmag.fillkeys(Samps)
    pmag.magic_write(samp_file, SampsOut, "er_samples")
    Sites = []
    for samp in Samps:
        SiteRec = {}
        SiteRec['er_site_name'] = samp['er_site_name']
        SiteRec['er_location_name'] = samp['er_location_name']
        SiteRec['site_definition'] = 's'
        SiteRec['er_citation_names'] = 'This study'
        if 'sample_class' in samp:
            SiteRec['site_class'] = samp['sample_class']
        if 'sample_lithology' in samp:
            SiteRec['site_lithology'] = samp['sample_lithology']
        if 'sample_type' in samp:
            # BUG FIX: was copying sample_lithology into site_lithology a
            # second time instead of carrying the sample type across.
            SiteRec['site_type'] = samp['sample_type']
        if 'sample_lat' in samp:
            SiteRec['site_lat'] = samp['sample_lat']
        else:
            SiteRec['site_lat'] = "-999"
        if 'sample_lon' in samp:
            SiteRec['site_lon'] = samp['sample_lon']
        else:
            SiteRec['site_lon'] = "-999"
        if 'sample_height' in samp:
            SiteRec['site_height'] = samp['sample_height']
        Sites.append(SiteRec)
    pmag.magic_write(site_file, Sites, 'er_sites')
    return True, meas_file
|
def function[main, parameter[command_line]]:
constant[
NAME
_2g_bin_magic.py
DESCRIPTION
takes the binary 2g format magnetometer files and converts them to magic_measurements, er_samples.txt and er_sites.txt file
SYNTAX
2g_bin_magic.py [command line options]
OPTIONS
-f FILE: specify input 2g (binary) file
-F FILE: specify magic_measurements output file, default is: magic_measurements.txt
-Fsa FILE: specify output file, default is: er_samples.txt
-Fsi FILE: specify output file, default is: er_sites.txt
-ncn NCON: specify naming convention: default is #2 below
-ocn OCON: specify orientation convention, default is #5 below
-mcd: specify sampling method codes as a colon delimited string: [default is: FS-FD:SO-POM]
FS-FD field sampling done with a drill
FS-H field sampling done with hand samples
FS-LOC-GPS field location done with GPS
FS-LOC-MAP field location done with map
SO-POM a Pomeroy orientation device was used
SO-ASC an ASC orientation device was used
SO-MAG orientation with magnetic compass
SO-SUN orientation with sun compass
-loc: location name, default="unknown"
-spc NUM : specify number of characters to designate a specimen, default = 0
-ins INST : specify instsrument name
-a: average replicate measurements
INPUT FORMAT
Input files are horrible mag binary format (who knows why?)
Orientation convention:
[1] Lab arrow azimuth= mag_azimuth; Lab arrow dip=-field_dip
i.e., field_dip is degrees from vertical down - the hade [default]
[2] Lab arrow azimuth = mag_azimuth-90; Lab arrow dip = -field_dip
i.e., mag_azimuth is strike and field_dip is hade
[3] Lab arrow azimuth = mag_azimuth; Lab arrow dip = 90-field_dip
i.e., lab arrow same as field arrow, but field_dip was a hade.
[4] lab azimuth and dip are same as mag_azimuth, field_dip
[5] lab azimuth is same as mag_azimuth,lab arrow dip=field_dip-90
[6] Lab arrow azimuth = mag_azimuth-90; Lab arrow dip = 90-field_dip
[7] all others you will have to either customize your
self or e-mail ltauxe@ucsd.edu for help.
Magnetic declination convention:
Az will use supplied declination to correct azimuth
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
[2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitary length)
[3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name = sample name
[6] site name entered in site_name column in the orient.txt format input file -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
NB: all others you will have to either customize your
self or e-mail ltauxe@ucsd.edu for help.
OUTPUT
output saved in magic_measurements.txt & er_samples.txt formatted files
will overwrite any existing files
]
variable[mag_file] assign[=] constant[]
variable[specnum] assign[=] constant[0]
<ast.Tuple object at 0x7da1b0398fd0> assign[=] tuple[[<ast.Constant object at 0x7da1b0399270>, <ast.Constant object at 0x7da1b03993c0>, <ast.Constant object at 0x7da1b0398d30>, <ast.Constant object at 0x7da1b0399780>, <ast.Constant object at 0x7da1b0398d00>]]
<ast.Tuple object at 0x7da1b039af80> assign[=] tuple[[<ast.Constant object at 0x7da1b0399120>, <ast.Constant object at 0x7da1b039ac80>]]
variable[noave] assign[=] constant[1]
variable[args] assign[=] name[sys].argv
<ast.Tuple object at 0x7da1b0398e20> assign[=] tuple[[<ast.Constant object at 0x7da1b0398160>, <ast.Constant object at 0x7da1b03980d0>]]
<ast.Tuple object at 0x7da1b039a860> assign[=] tuple[[<ast.Constant object at 0x7da1b0398f10>, <ast.Constant object at 0x7da1b039beb0>, <ast.Constant object at 0x7da1b039bd00>]]
variable[meths] assign[=] constant[FS-FD]
<ast.Tuple object at 0x7da1b039b370> assign[=] tuple[[<ast.Constant object at 0x7da1b0398100>, <ast.Constant object at 0x7da1b0399420>, <ast.Constant object at 0x7da1b03999c0>]]
<ast.Tuple object at 0x7da1b0399f60> assign[=] tuple[[<ast.Constant object at 0x7da1b03994b0>, <ast.Constant object at 0x7da1b039a770>]]
variable[DecCorr] assign[=] constant[0.0]
variable[location_name] assign[=] constant[unknown]
variable[months] assign[=] list[[<ast.Constant object at 0x7da1b039b4f0>, <ast.Constant object at 0x7da1b039a0e0>, <ast.Constant object at 0x7da1b039b070>, <ast.Constant object at 0x7da1b039a4d0>, <ast.Constant object at 0x7da1b039bac0>, <ast.Constant object at 0x7da1b0398f40>, <ast.Constant object at 0x7da1b03999f0>, <ast.Constant object at 0x7da1b03995a0>, <ast.Constant object at 0x7da1b0399e40>, <ast.Constant object at 0x7da1b0399540>, <ast.Constant object at 0x7da1b0399900>, <ast.Constant object at 0x7da1b0399990>]]
variable[gmeths] assign[=] constant[]
variable[dir_path] assign[=] constant[.]
if name[command_line] begin[:]
if compare[constant[-WD] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-WD]]]
variable[dir_path] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-h] in name[args]] begin[:]
call[name[print], parameter[name[main].__doc__]]
return[constant[False]]
if compare[constant[-f] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-f]]]
variable[mag_file] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-fpos] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-fpos]]]
variable[pos_file] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-F] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-F]]]
variable[meas_file] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-Fsa] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-Fsa]]]
variable[samp_file] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-Fsi] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-Fsi]]]
variable[site_file] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-ocn] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-ocn]]]
variable[or_con] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-ncn] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-ncn]]]
variable[samp_con] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-mcd] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-mcd]]]
variable[gmeths] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-loc] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-loc]]]
variable[location_name] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-spc] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-spc]]]
variable[specnum] assign[=] call[name[int], parameter[call[name[args]][binary_operation[name[ind] + constant[1]]]]]
if compare[constant[-ins] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-ins]]]
variable[inst] assign[=] call[name[args]][binary_operation[name[ind] + constant[1]]]
if compare[constant[-a] in name[args]] begin[:]
variable[noave] assign[=] constant[0]
variable[ID] assign[=] constant[False]
if compare[constant[-ID] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-ID]]]
variable[ID] assign[=] call[name[args]][binary_operation[name[ind] + constant[1]]]
if <ast.UnaryOp object at 0x7da18dc9a860> begin[:]
variable[dir_path] assign[=] call[name[kwargs].get, parameter[constant[dir_path], constant[.]]]
variable[mag_file] assign[=] call[name[kwargs].get, parameter[constant[mag_file], constant[]]]
variable[pos_file] assign[=] call[name[kwargs].get, parameter[constant[pos_file], constant[]]]
variable[meas_file] assign[=] call[name[kwargs].get, parameter[constant[meas_file], constant[magic_measurements.txt]]]
variable[samp_file] assign[=] call[name[kwargs].get, parameter[constant[samp_file], constant[er_samples.txt]]]
variable[site_file] assign[=] call[name[kwargs].get, parameter[constant[site_file], constant[er_sites.txt]]]
variable[or_con] assign[=] call[name[kwargs].get, parameter[constant[or_con], constant[3]]]
variable[samp_con] assign[=] call[name[kwargs].get, parameter[constant[samp_con], constant[2]]]
variable[corr] assign[=] call[name[kwargs].get, parameter[constant[corr], constant[1]]]
variable[gmeths] assign[=] call[name[kwargs].get, parameter[constant[gmeths], constant[]]]
variable[location_name] assign[=] call[name[kwargs].get, parameter[constant[location_name], constant[]]]
variable[specnum] assign[=] call[name[int], parameter[call[name[kwargs].get, parameter[constant[specnum], constant[0]]]]]
variable[inst] assign[=] call[name[kwargs].get, parameter[constant[inst], constant[]]]
variable[noave] assign[=] call[name[kwargs].get, parameter[constant[noave], constant[1]]]
variable[ID] assign[=] call[name[kwargs].get, parameter[constant[ID], constant[]]]
if compare[name[specnum] not_equal[!=] constant[0]] begin[:]
variable[specnum] assign[=] <ast.UnaryOp object at 0x7da1b26ac910>
if name[ID] begin[:]
variable[input_dir_path] assign[=] name[ID]
if name[samp_con] begin[:]
if compare[constant[4] in name[samp_con]] begin[:]
if compare[constant[-] <ast.NotIn object at 0x7da2590d7190> name[samp_con]] begin[:]
call[name[print], parameter[constant[option [4] must be in form 4-Z where Z is an integer]]]
return[tuple[[<ast.Constant object at 0x7da1b26acb80>, <ast.Constant object at 0x7da1b26acaf0>]]]
if compare[constant[7] in name[samp_con]] begin[:]
if compare[constant[-] <ast.NotIn object at 0x7da2590d7190> name[samp_con]] begin[:]
call[name[print], parameter[constant[option [7] must be in form 7-Z where Z is an integer]]]
return[tuple[[<ast.Constant object at 0x7da1b26afa90>, <ast.Constant object at 0x7da1b26af370>]]]
if compare[constant[6] in name[samp_con]] begin[:]
call[name[print], parameter[constant[Naming convention option [6] not currently supported]]]
return[tuple[[<ast.Constant object at 0x7da1b26aca60>, <ast.Constant object at 0x7da1b26ac040>]]]
<ast.Try object at 0x7da1b26ac5e0>
if compare[name[file_type] equal[==] constant[bad_file]] begin[:]
call[name[print], parameter[constant[there is no er_samples.txt file in your input directory - you can't use naming convention #6]]]
return[tuple[[<ast.Constant object at 0x7da20c6c6590>, <ast.Constant object at 0x7da20c6c4910>]]]
if <ast.UnaryOp object at 0x7da20c6c7700> begin[:]
call[name[print], parameter[constant[mag file is required input]]]
return[tuple[[<ast.Constant object at 0x7da20c6c6920>, <ast.Constant object at 0x7da20c6c5270>]]]
variable[output_dir_path] assign[=] name[dir_path]
variable[mag_file] assign[=] call[name[os].path.join, parameter[name[input_dir_path], name[mag_file]]]
variable[samp_file] assign[=] binary_operation[binary_operation[name[output_dir_path] + constant[/]] + name[samp_file]]
variable[site_file] assign[=] binary_operation[binary_operation[name[output_dir_path] + constant[/]] + name[site_file]]
variable[meas_file] assign[=] binary_operation[binary_operation[name[output_dir_path] + constant[/]] + name[meas_file]]
variable[samplist] assign[=] list[[]]
<ast.Try object at 0x7da20c6c6290>
variable[MagRecs] assign[=] list[[]]
<ast.Try object at 0x7da20c6c60e0>
<ast.Tuple object at 0x7da20c6c7fd0> assign[=] tuple[[<ast.Constant object at 0x7da20c6c6da0>, <ast.Constant object at 0x7da20c6c6200>]]
variable[d] assign[=] call[name[input].split, parameter[constant[\xcd]]]
for taget[name[line]] in starred[name[d]] begin[:]
variable[rec] assign[=] call[name[line].split, parameter[constant[\x00]]]
if compare[name[firstline] equal[==] constant[1]] begin[:]
variable[firstline] assign[=] constant[0]
<ast.Tuple object at 0x7da20c6c48e0> assign[=] tuple[[<ast.Constant object at 0x7da20c6c4a00>, <ast.Constant object at 0x7da20c6c5240>]]
variable[el] assign[=] constant[51]
while compare[call[name[line]][<ast.Slice object at 0x7da20c6c5c90>] not_equal[!=] constant[\]] begin[:]
variable[spec] assign[=] binary_operation[name[spec] + call[name[line]][name[el]]]
<ast.AugAssign object at 0x7da20c6c63e0>
variable[test] assign[=] call[name[spec].split, parameter[constant[.]]]
variable[date] assign[=] constant[]
if compare[call[name[len], parameter[name[test]]] greater[>] constant[1]] begin[:]
variable[spec] assign[=] call[name[test]][constant[0]]
variable[kk] assign[=] constant[24]
while <ast.BoolOp object at 0x7da18f810b20> begin[:]
<ast.AugAssign object at 0x7da18f811090>
variable[vcc] assign[=] call[name[line]][<ast.Slice object at 0x7da18f812560>]
variable[el] assign[=] constant[10]
while compare[call[call[name[rec]][name[el]].strip, parameter[]] not_equal[!=] constant[]] begin[:]
<ast.AugAssign object at 0x7da1b2346020>
<ast.Tuple object at 0x7da1b2344fa0> assign[=] tuple[[<ast.Subscript object at 0x7da1b2344520>, <ast.List object at 0x7da1b23460b0>]]
variable[specname] assign[=] call[name[spec].lower, parameter[]]
call[name[print], parameter[constant[importing ], name[specname]]]
<ast.AugAssign object at 0x7da1b2344ee0>
while compare[call[call[name[rec]][name[el]].isdigit, parameter[]] equal[==] constant[False]] begin[:]
call[name[comments].append, parameter[call[name[rec]][name[el]]]]
<ast.AugAssign object at 0x7da1b2347b20>
while compare[call[name[rec]][name[el]] equal[==] constant[]] begin[:]
<ast.AugAssign object at 0x7da1b23441c0>
variable[az] assign[=] call[name[float], parameter[call[name[rec]][name[el]]]]
<ast.AugAssign object at 0x7da1b2347910>
while compare[call[name[rec]][name[el]] equal[==] constant[]] begin[:]
<ast.AugAssign object at 0x7da1b23468c0>
variable[pl] assign[=] call[name[float], parameter[call[name[rec]][name[el]]]]
<ast.AugAssign object at 0x7da1b2344670>
while compare[call[name[rec]][name[el]] equal[==] constant[]] begin[:]
<ast.AugAssign object at 0x7da1b2344b50>
variable[bed_dip_dir] assign[=] call[name[float], parameter[call[name[rec]][name[el]]]]
<ast.AugAssign object at 0x7da1b2346650>
while compare[call[name[rec]][name[el]] equal[==] constant[]] begin[:]
<ast.AugAssign object at 0x7da1b2347610>
variable[bed_dip] assign[=] call[name[float], parameter[call[name[rec]][name[el]]]]
<ast.AugAssign object at 0x7da1b03992a0>
while compare[call[name[rec]][name[el]] equal[==] constant[]] begin[:]
<ast.AugAssign object at 0x7da1b039b580>
if compare[call[name[rec]][name[el]] equal[==] constant[\x01]] begin[:]
variable[bed_dip] assign[=] binary_operation[constant[180.0] - name[bed_dip]]
<ast.AugAssign object at 0x7da1b03985b0>
while compare[call[name[rec]][name[el]] equal[==] constant[]] begin[:]
<ast.AugAssign object at 0x7da1b0398c10>
variable[fold_az] assign[=] call[name[float], parameter[call[name[rec]][name[el]]]]
<ast.AugAssign object at 0x7da1b0399de0>
while compare[call[name[rec]][name[el]] equal[==] constant[]] begin[:]
<ast.AugAssign object at 0x7da1b039a320>
variable[fold_pl] assign[=] call[name[rec]][name[el]]
<ast.AugAssign object at 0x7da1b035a380>
while compare[call[name[rec]][name[el]] equal[==] constant[]] begin[:]
<ast.AugAssign object at 0x7da1b03598a0>
if <ast.BoolOp object at 0x7da1b035b2e0> begin[:]
variable[deccorr] assign[=] call[name[float], parameter[call[name[rec]][name[el]]]]
<ast.AugAssign object at 0x7da1b03585e0>
<ast.AugAssign object at 0x7da1b03590f0>
<ast.AugAssign object at 0x7da1b035ba90>
if compare[name[bed_dip_dir] greater_or_equal[>=] constant[360]] begin[:]
variable[bed_dip_dir] assign[=] binary_operation[name[bed_dip_dir] - constant[360.0]]
if compare[name[az] greater_or_equal[>=] constant[360.0]] begin[:]
variable[az] assign[=] binary_operation[name[az] - constant[360.0]]
if compare[name[fold_az] greater_or_equal[>=] constant[360.0]] begin[:]
variable[fold_az] assign[=] binary_operation[name[fold_az] - constant[360.0]]
if compare[name[specnum] not_equal[!=] constant[0]] begin[:]
variable[sample] assign[=] call[name[specname]][<ast.Slice object at 0x7da1b03598d0>]
variable[SampRec] assign[=] dictionary[[], []]
call[name[SampRec]][constant[er_sample_name]] assign[=] name[sample]
call[name[SampRec]][constant[er_location_name]] assign[=] name[location_name]
call[name[SampRec]][constant[er_citation_names]] assign[=] constant[This study]
<ast.Tuple object at 0x7da1b0358ca0> assign[=] call[name[pmag].orient, parameter[name[az], name[pl], name[or_con]]]
call[name[SampRec]][constant[sample_bed_dip]] assign[=] binary_operation[constant[%7.1f] <ast.Mod object at 0x7da2590d6920> name[bed_dip]]
call[name[SampRec]][constant[sample_bed_dip_direction]] assign[=] binary_operation[constant[%7.1f] <ast.Mod object at 0x7da2590d6920> name[bed_dip_dir]]
call[name[SampRec]][constant[sample_dip]] assign[=] binary_operation[constant[%7.1f] <ast.Mod object at 0x7da2590d6920> name[labdip]]
call[name[SampRec]][constant[sample_azimuth]] assign[=] binary_operation[constant[%7.1f] <ast.Mod object at 0x7da2590d6920> name[labaz]]
if compare[call[name[vcc].strip, parameter[]] not_equal[!=] constant[]] begin[:]
variable[vol] assign[=] binary_operation[call[name[float], parameter[name[vcc]]] * constant[1e-06]]
call[name[SampRec]][constant[sample_volume]] assign[=] binary_operation[constant[%10.3e] <ast.Mod object at 0x7da2590d6920> name[vol]]
call[name[SampRec]][constant[sample_class]] assign[=] name[sclass]
call[name[SampRec]][constant[sample_lithology]] assign[=] name[lithology]
call[name[SampRec]][constant[sample_type]] assign[=] name[_type]
call[name[SampRec]][constant[sample_declination_correction]] assign[=] binary_operation[constant[%7.1f] <ast.Mod object at 0x7da2590d6920> name[deccorr]]
variable[methods] assign[=] call[name[gmeths].split, parameter[constant[:]]]
if compare[name[deccorr] not_equal[!=] constant[0]] begin[:]
if compare[constant[SO-MAG] in name[methods]] begin[:]
<ast.Delete object at 0x7da1b03592a0>
call[name[methods].append, parameter[constant[SO-CMD-NORTH]]]
variable[meths] assign[=] constant[]
for taget[name[meth]] in starred[name[methods]] begin[:]
variable[meths] assign[=] binary_operation[binary_operation[name[meths] + name[meth]] + constant[:]]
variable[meths] assign[=] call[name[meths]][<ast.Slice object at 0x7da1b035a260>]
call[name[SampRec]][constant[magic_method_codes]] assign[=] name[meths]
if <ast.BoolOp object at 0x7da1b0358040> begin[:]
variable[site] assign[=] call[name[pmag].parse_site, parameter[call[name[SampRec]][constant[er_sample_name]], name[samp_con], name[Z]]]
call[name[SampRec]][constant[er_site_name]] assign[=] name[site]
if compare[name[sample] <ast.NotIn object at 0x7da2590d7190> name[samplist]] begin[:]
call[name[samplist].append, parameter[name[sample]]]
call[name[Samps].append, parameter[name[SampRec]]]
variable[MagOuts] assign[=] call[name[pmag].measurements_methods, parameter[name[MagRecs], name[noave]]]
<ast.Tuple object at 0x7da204567370> assign[=] call[name[pmag].fillkeys, parameter[name[MagOuts]]]
call[name[pmag].magic_write, parameter[name[meas_file], name[MagOuts], constant[magic_measurements]]]
call[name[print], parameter[constant[Measurements put in ], name[meas_file]]]
<ast.Tuple object at 0x7da204566260> assign[=] call[name[pmag].fillkeys, parameter[name[Samps]]]
call[name[pmag].magic_write, parameter[name[samp_file], name[SampsOut], constant[er_samples]]]
variable[Sites] assign[=] list[[]]
for taget[name[samp]] in starred[name[Samps]] begin[:]
variable[SiteRec] assign[=] dictionary[[], []]
call[name[SiteRec]][constant[er_site_name]] assign[=] call[name[samp]][constant[er_site_name]]
call[name[SiteRec]][constant[er_location_name]] assign[=] call[name[samp]][constant[er_location_name]]
call[name[SiteRec]][constant[site_definition]] assign[=] constant[s]
call[name[SiteRec]][constant[er_citation_names]] assign[=] constant[This study]
if compare[constant[sample_class] in call[name[list], parameter[call[name[samp].keys, parameter[]]]]] begin[:]
call[name[SiteRec]][constant[site_class]] assign[=] call[name[samp]][constant[sample_class]]
if compare[constant[sample_lithology] in call[name[list], parameter[call[name[samp].keys, parameter[]]]]] begin[:]
call[name[SiteRec]][constant[site_lithology]] assign[=] call[name[samp]][constant[sample_lithology]]
if compare[constant[sample_type] in call[name[list], parameter[call[name[samp].keys, parameter[]]]]] begin[:]
call[name[SiteRec]][constant[site_lithology]] assign[=] call[name[samp]][constant[sample_lithology]]
if compare[constant[sample_lat] in call[name[list], parameter[call[name[samp].keys, parameter[]]]]] begin[:]
call[name[SiteRec]][constant[site_lat]] assign[=] call[name[samp]][constant[sample_lat]]
if compare[constant[sample_lon] in call[name[list], parameter[call[name[samp].keys, parameter[]]]]] begin[:]
call[name[SiteRec]][constant[site_lon]] assign[=] call[name[samp]][constant[sample_lon]]
if compare[constant[sample_height] in call[name[list], parameter[call[name[samp].keys, parameter[]]]]] begin[:]
call[name[SiteRec]][constant[site_height]] assign[=] call[name[samp]][constant[sample_height]]
call[name[Sites].append, parameter[name[SiteRec]]]
call[name[pmag].magic_write, parameter[name[site_file], name[Sites], constant[er_sites]]]
return[tuple[[<ast.Constant object at 0x7da2044c3580>, <ast.Name object at 0x7da2044c0dc0>]]]
|
keyword[def] identifier[main] ( identifier[command_line] = keyword[True] ,** identifier[kwargs] ):
literal[string]
identifier[mag_file] = literal[string]
identifier[specnum] = literal[int]
identifier[ub_file] , identifier[samp_file] , identifier[or_con] , identifier[corr] , identifier[meas_file] = literal[string] , literal[string] , literal[string] , literal[string] , literal[string]
identifier[pos_file] , identifier[site_file] = literal[string] , literal[string]
identifier[noave] = literal[int]
identifier[args] = identifier[sys] . identifier[argv]
identifier[bed_dip] , identifier[bed_dip_dir] = literal[string] , literal[string]
identifier[samp_con] , identifier[Z] , identifier[average_bedding] = literal[string] , literal[int] , literal[string]
identifier[meths] = literal[string]
identifier[sclass] , identifier[lithology] , identifier[_type] = literal[string] , literal[string] , literal[string]
identifier[user] , identifier[inst] = literal[string] , literal[string]
identifier[DecCorr] = literal[int]
identifier[location_name] = literal[string]
identifier[months] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[gmeths] = literal[string]
identifier[dir_path] = literal[string]
keyword[if] identifier[command_line] :
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[dir_path] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[print] ( identifier[main] . identifier[__doc__] )
keyword[return] keyword[False]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[mag_file] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[pos_file] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[meas_file] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[samp_file] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[site_file] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[or_con] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[samp_con] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[gmeths] =( identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ])
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[location_name] =( identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ])
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[specnum] = identifier[int] ( identifier[args] [ identifier[ind] + literal[int] ])
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[inst] = identifier[args] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[noave] = literal[int]
identifier[ID] = keyword[False]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[ID] = identifier[args] [ identifier[ind] + literal[int] ]
keyword[if] keyword[not] identifier[command_line] :
identifier[dir_path] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[mag_file] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[pos_file] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[meas_file] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[samp_file] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[site_file] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[or_con] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[samp_con] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[corr] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[gmeths] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[location_name] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[specnum] = identifier[int] ( identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ))
identifier[inst] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[noave] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[ID] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[specnum] != literal[int] :
identifier[specnum] =- identifier[specnum]
keyword[if] identifier[ID] :
identifier[input_dir_path] = identifier[ID]
keyword[else] :
identifier[input_dir_path] = identifier[dir_path]
keyword[if] identifier[samp_con] :
keyword[if] literal[string] keyword[in] identifier[samp_con] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[samp_con] :
identifier[print] ( literal[string] )
keyword[return] keyword[False] , literal[string]
keyword[else] :
identifier[Z] = identifier[samp_con] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[samp_con] = literal[string]
keyword[if] literal[string] keyword[in] identifier[samp_con] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[samp_con] :
identifier[print] ( literal[string] )
keyword[return] keyword[False] , literal[string]
keyword[else] :
identifier[Z] = identifier[samp_con] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[samp_con] = literal[string]
keyword[if] literal[string] keyword[in] identifier[samp_con] :
identifier[print] ( literal[string] )
keyword[return] keyword[False] , literal[string]
keyword[try] :
identifier[Samps] , identifier[file_type] = identifier[pmag] . identifier[magic_read] (
identifier[os] . identifier[path] . identifier[join] ( identifier[input_dir_path] , literal[string] ))
keyword[except] :
identifier[print] (
literal[string] )
keyword[return] keyword[False] , literal[string]
keyword[if] identifier[file_type] == literal[string] :
identifier[print] (
literal[string] )
keyword[return] keyword[False] , literal[string]
keyword[if] keyword[not] identifier[mag_file] :
identifier[print] ( literal[string] )
keyword[return] keyword[False] , literal[string]
identifier[output_dir_path] = identifier[dir_path]
identifier[mag_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[input_dir_path] , identifier[mag_file] )
identifier[samp_file] = identifier[output_dir_path] + literal[string] + identifier[samp_file]
identifier[site_file] = identifier[output_dir_path] + literal[string] + identifier[site_file]
identifier[meas_file] = identifier[output_dir_path] + literal[string] + identifier[meas_file]
identifier[samplist] =[]
keyword[try] :
identifier[Samps] , identifier[file_type] = identifier[pmag] . identifier[magic_read] ( identifier[samp_file] )
keyword[for] identifier[samp] keyword[in] identifier[Samps] :
keyword[if] identifier[samp] [ literal[string] ] keyword[not] keyword[in] identifier[samplist] :
identifier[samplist] . identifier[append] ( identifier[samp] [ literal[string] ])
keyword[except] :
identifier[Samps] =[]
identifier[MagRecs] =[]
keyword[try] :
identifier[f] = identifier[open] ( identifier[mag_file] , literal[string] )
identifier[input] = identifier[str] ( identifier[f] . identifier[read] ()). identifier[strip] ( literal[string] )
identifier[f] . identifier[close] ()
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[print] ( literal[string] , identifier[ex] )
identifier[print] ( literal[string] )
keyword[return] keyword[False] , literal[string]
identifier[firstline] , identifier[date] = literal[int] , literal[string]
identifier[d] = identifier[input] . identifier[split] ( literal[string] )
keyword[for] identifier[line] keyword[in] identifier[d] :
identifier[rec] = identifier[line] . identifier[split] ( literal[string] )
keyword[if] identifier[firstline] == literal[int] :
identifier[firstline] = literal[int]
identifier[spec] , identifier[vol] = literal[string] , literal[int]
identifier[el] = literal[int]
keyword[while] identifier[line] [ identifier[el] : identifier[el] + literal[int] ]!= literal[string] :
identifier[spec] = identifier[spec] + identifier[line] [ identifier[el] ]
identifier[el] += literal[int]
identifier[test] = identifier[spec] . identifier[split] ( literal[string] )
identifier[date] = literal[string]
keyword[if] identifier[len] ( identifier[test] )> literal[int] :
identifier[spec] = identifier[test] [ literal[int] ]
identifier[kk] = literal[int]
keyword[while] identifier[line] [ identifier[kk] ]!= literal[string] keyword[and] identifier[line] [ identifier[kk] ]!= literal[string] :
identifier[kk] += literal[int]
identifier[vcc] = identifier[line] [ literal[int] : identifier[kk] ]
identifier[el] = literal[int]
keyword[while] identifier[rec] [ identifier[el] ]. identifier[strip] ()!= literal[string] :
identifier[el] += literal[int]
identifier[date] , identifier[comments] = identifier[rec] [ identifier[el] + literal[int] ],[]
keyword[else] :
identifier[el] = literal[int]
keyword[while] identifier[rec] [ identifier[el] ]!= literal[string] :
identifier[el] += literal[int]
identifier[vcc] , identifier[date] , identifier[comments] = identifier[rec] [ identifier[el] - literal[int] ], identifier[rec] [ identifier[el] + literal[int] ],[]
identifier[specname] = identifier[spec] . identifier[lower] ()
identifier[print] ( literal[string] , identifier[specname] )
identifier[el] += literal[int]
keyword[while] identifier[rec] [ identifier[el] ]. identifier[isdigit] ()== keyword[False] :
identifier[comments] . identifier[append] ( identifier[rec] [ identifier[el] ])
identifier[el] += literal[int]
keyword[while] identifier[rec] [ identifier[el] ]== literal[string] :
identifier[el] += literal[int]
identifier[az] = identifier[float] ( identifier[rec] [ identifier[el] ])
identifier[el] += literal[int]
keyword[while] identifier[rec] [ identifier[el] ]== literal[string] :
identifier[el] += literal[int]
identifier[pl] = identifier[float] ( identifier[rec] [ identifier[el] ])
identifier[el] += literal[int]
keyword[while] identifier[rec] [ identifier[el] ]== literal[string] :
identifier[el] += literal[int]
identifier[bed_dip_dir] = identifier[float] ( identifier[rec] [ identifier[el] ])
identifier[el] += literal[int]
keyword[while] identifier[rec] [ identifier[el] ]== literal[string] :
identifier[el] += literal[int]
identifier[bed_dip] = identifier[float] ( identifier[rec] [ identifier[el] ])
identifier[el] += literal[int]
keyword[while] identifier[rec] [ identifier[el] ]== literal[string] :
identifier[el] += literal[int]
keyword[if] identifier[rec] [ identifier[el] ]== literal[string] :
identifier[bed_dip] = literal[int] - identifier[bed_dip]
identifier[el] += literal[int]
keyword[while] identifier[rec] [ identifier[el] ]== literal[string] :
identifier[el] += literal[int]
identifier[fold_az] = identifier[float] ( identifier[rec] [ identifier[el] ])
identifier[el] += literal[int]
keyword[while] identifier[rec] [ identifier[el] ]== literal[string] :
identifier[el] += literal[int]
identifier[fold_pl] = identifier[rec] [ identifier[el] ]
identifier[el] += literal[int]
keyword[while] identifier[rec] [ identifier[el] ]== literal[string] :
identifier[el] += literal[int]
keyword[if] identifier[rec] [ identifier[el] ]!= literal[string] keyword[and] identifier[rec] [ identifier[el] ]!= literal[string] keyword[and] identifier[rec] [ identifier[el] ]!= literal[string] :
identifier[deccorr] = identifier[float] ( identifier[rec] [ identifier[el] ])
identifier[az] += identifier[deccorr]
identifier[bed_dip_dir] += identifier[deccorr]
identifier[fold_az] += identifier[deccorr]
keyword[if] identifier[bed_dip_dir] >= literal[int] :
identifier[bed_dip_dir] = identifier[bed_dip_dir] - literal[int]
keyword[if] identifier[az] >= literal[int] :
identifier[az] = identifier[az] - literal[int]
keyword[if] identifier[fold_az] >= literal[int] :
identifier[fold_az] = identifier[fold_az] - literal[int]
keyword[else] :
identifier[deccorr] = literal[int]
keyword[if] identifier[specnum] != literal[int] :
identifier[sample] = identifier[specname] [: identifier[specnum] ]
keyword[else] :
identifier[sample] = identifier[specname]
identifier[SampRec] ={}
identifier[SampRec] [ literal[string] ]= identifier[sample]
identifier[SampRec] [ literal[string] ]= identifier[location_name]
identifier[SampRec] [ literal[string] ]= literal[string]
identifier[labaz] , identifier[labdip] = identifier[pmag] . identifier[orient] ( identifier[az] , identifier[pl] , identifier[or_con] )
identifier[SampRec] [ literal[string] ]= literal[string] %( identifier[bed_dip] )
identifier[SampRec] [ literal[string] ]= literal[string] %( identifier[bed_dip_dir] )
identifier[SampRec] [ literal[string] ]= literal[string] %( identifier[labdip] )
identifier[SampRec] [ literal[string] ]= literal[string] %( identifier[labaz] )
keyword[if] identifier[vcc] . identifier[strip] ()!= literal[string] :
identifier[vol] = identifier[float] ( identifier[vcc] )* literal[int]
identifier[SampRec] [ literal[string] ]= literal[string] %( identifier[vol] )
identifier[SampRec] [ literal[string] ]= identifier[sclass]
identifier[SampRec] [ literal[string] ]= identifier[lithology]
identifier[SampRec] [ literal[string] ]= identifier[_type]
identifier[SampRec] [ literal[string] ]= literal[string] %( identifier[deccorr] )
identifier[methods] = identifier[gmeths] . identifier[split] ( literal[string] )
keyword[if] identifier[deccorr] != literal[string] :
keyword[if] literal[string] keyword[in] identifier[methods] :
keyword[del] identifier[methods] [ identifier[methods] . identifier[index] ( literal[string] )]
identifier[methods] . identifier[append] ( literal[string] )
identifier[meths] = literal[string]
keyword[for] identifier[meth] keyword[in] identifier[methods] :
identifier[meths] = identifier[meths] + identifier[meth] + literal[string]
identifier[meths] = identifier[meths] [:- literal[int] ]
identifier[SampRec] [ literal[string] ]= identifier[meths]
keyword[if] identifier[int] ( identifier[samp_con] )< literal[int] keyword[or] identifier[int] ( identifier[samp_con] )== literal[int] :
identifier[site] = identifier[pmag] . identifier[parse_site] ( identifier[SampRec] [ literal[string] ], identifier[samp_con] , identifier[Z] )
identifier[SampRec] [ literal[string] ]= identifier[site]
keyword[elif] identifier[len] ( identifier[Samps] )> literal[int] :
identifier[site] , identifier[location] = literal[string] , literal[string]
keyword[for] identifier[samp] keyword[in] identifier[Samps] :
keyword[if] identifier[samp] [ literal[string] ]== identifier[SampRec] [ literal[string] ]:
identifier[site] = identifier[samp] [ literal[string] ]
identifier[location] = identifier[samp] [ literal[string] ]
keyword[break]
identifier[SampRec] [ literal[string] ]= identifier[samp] [ literal[string] ]
identifier[SampRec] [ literal[string] ]= identifier[samp] [ literal[string] ]
keyword[if] identifier[sample] keyword[not] keyword[in] identifier[samplist] :
identifier[samplist] . identifier[append] ( identifier[sample] )
identifier[Samps] . identifier[append] ( identifier[SampRec] )
keyword[else] :
identifier[MagRec] ={}
identifier[MagRec] [ literal[string] ]= literal[string] %( literal[int] )
identifier[MagRec] [ literal[string] ]= literal[string] %( literal[int] )
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[meas_type] = literal[string]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= identifier[specname]
identifier[MagRec] [ literal[string] ]= identifier[SampRec] [ literal[string] ]
identifier[MagRec] [ literal[string] ]= identifier[SampRec] [ literal[string] ]
identifier[MagRec] [ literal[string] ]= identifier[location_name]
identifier[el] , identifier[demag] = literal[int] , literal[string]
identifier[treat] = identifier[rec] [ identifier[el] ]
keyword[if] identifier[treat] [- literal[int] ]== literal[string] :
identifier[demag] = literal[string]
keyword[elif] identifier[treat] != literal[string] :
identifier[demag] = literal[string]
identifier[el] += literal[int]
keyword[while] identifier[rec] [ identifier[el] ]== literal[string] :
identifier[el] += literal[int]
identifier[MagRec] [ literal[string] ]= identifier[rec] [ identifier[el] ]
identifier[cdec] = identifier[float] ( identifier[rec] [ identifier[el] ])
identifier[el] += literal[int]
keyword[while] identifier[rec] [ identifier[el] ]== literal[string] :
identifier[el] += literal[int]
identifier[MagRec] [ literal[string] ]= identifier[rec] [ identifier[el] ]
identifier[cinc] = identifier[float] ( identifier[rec] [ identifier[el] ])
identifier[el] += literal[int]
keyword[while] identifier[rec] [ identifier[el] ]== literal[string] :
identifier[el] += literal[int]
identifier[gdec] = identifier[rec] [ identifier[el] ]
identifier[el] += literal[int]
keyword[while] identifier[rec] [ identifier[el] ]== literal[string] :
identifier[el] += literal[int]
identifier[ginc] = identifier[rec] [ identifier[el] ]
identifier[el] = identifier[skip] ( literal[int] , identifier[el] , identifier[rec] )
identifier[MagRec] [ literal[string] ]= literal[string] %(
identifier[float] ( identifier[rec] [ identifier[el] ])* literal[int] )
identifier[MagRec] [ literal[string] ]= literal[string] %(
identifier[float] ( identifier[rec] [ identifier[el] ])* literal[int] / identifier[vol] )
identifier[el] = identifier[skip] ( literal[int] , identifier[el] , identifier[rec] )
identifier[MagRec] [ literal[string] ]= literal[string] %(
identifier[float] ( identifier[rec] [ identifier[el] ])* literal[int] )
identifier[el] = identifier[skip] ( literal[int] , identifier[el] , identifier[rec] )
identifier[MagRec] [ literal[string] ]= literal[string] %(
identifier[float] ( identifier[rec] [ identifier[el] ])* literal[int] )
identifier[el] = identifier[skip] ( literal[int] , identifier[el] , identifier[rec] )
identifier[MagRec] [ literal[string] ]= literal[string] %(
identifier[float] ( identifier[rec] [ identifier[el] ])* literal[int] )
identifier[el] += literal[int]
identifier[MagRec] [ literal[string] ]= identifier[rec] [ identifier[el] ]
identifier[MagRec] [ literal[string] ]= identifier[inst]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= identifier[meas_type]
keyword[if] identifier[demag] == literal[string] :
identifier[MagRec] [ literal[string] ]= literal[string] %(
identifier[float] ( identifier[treat] [:- literal[int] ])* literal[int] )
identifier[meas_type] = literal[string]
identifier[MagRec] [ literal[string] ]= literal[string]
keyword[elif] identifier[demag] == literal[string] :
identifier[MagRec] [ literal[string] ]= literal[string] %(
identifier[float] ( identifier[treat] [:- literal[int] ])+ literal[int] )
identifier[meas_type] = literal[string]
identifier[MagRec] [ literal[string] ]= identifier[meas_type]
identifier[MagRecs] . identifier[append] ( identifier[MagRec] )
identifier[MagOuts] = identifier[pmag] . identifier[measurements_methods] ( identifier[MagRecs] , identifier[noave] )
identifier[MagOuts] , identifier[keylist] = identifier[pmag] . identifier[fillkeys] ( identifier[MagOuts] )
identifier[pmag] . identifier[magic_write] ( identifier[meas_file] , identifier[MagOuts] , literal[string] )
identifier[print] ( literal[string] , identifier[meas_file] )
identifier[SampsOut] , identifier[sampkeys] = identifier[pmag] . identifier[fillkeys] ( identifier[Samps] )
identifier[pmag] . identifier[magic_write] ( identifier[samp_file] , identifier[SampsOut] , literal[string] )
identifier[Sites] =[]
keyword[for] identifier[samp] keyword[in] identifier[Samps] :
identifier[SiteRec] ={}
identifier[SiteRec] [ literal[string] ]= identifier[samp] [ literal[string] ]
identifier[SiteRec] [ literal[string] ]= identifier[samp] [ literal[string] ]
identifier[SiteRec] [ literal[string] ]= literal[string]
identifier[SiteRec] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[samp] . identifier[keys] ()):
identifier[SiteRec] [ literal[string] ]= identifier[samp] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[samp] . identifier[keys] ()):
identifier[SiteRec] [ literal[string] ]= identifier[samp] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[samp] . identifier[keys] ()):
identifier[SiteRec] [ literal[string] ]= identifier[samp] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[samp] . identifier[keys] ()):
identifier[SiteRec] [ literal[string] ]= identifier[samp] [ literal[string] ]
keyword[else] :
identifier[SiteRec] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[samp] . identifier[keys] ()):
identifier[SiteRec] [ literal[string] ]= identifier[samp] [ literal[string] ]
keyword[else] :
identifier[SiteRec] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[samp] . identifier[keys] ()):
identifier[SiteRec] [ literal[string] ]= identifier[samp] [ literal[string] ]
identifier[Sites] . identifier[append] ( identifier[SiteRec] )
identifier[pmag] . identifier[magic_write] ( identifier[site_file] , identifier[Sites] , literal[string] )
keyword[return] keyword[True] , identifier[meas_file]
|
def main(command_line=True, **kwargs):
"""
NAME
_2g_bin_magic.py
DESCRIPTION
takes the binary 2g format magnetometer files and converts them to magic_measurements, er_samples.txt and er_sites.txt file
SYNTAX
2g_bin_magic.py [command line options]
OPTIONS
-f FILE: specify input 2g (binary) file
-F FILE: specify magic_measurements output file, default is: magic_measurements.txt
-Fsa FILE: specify output file, default is: er_samples.txt
-Fsi FILE: specify output file, default is: er_sites.txt
-ncn NCON: specify naming convention: default is #2 below
-ocn OCON: specify orientation convention, default is #5 below
-mcd: specify sampling method codes as a colon delimited string: [default is: FS-FD:SO-POM]
FS-FD field sampling done with a drill
FS-H field sampling done with hand samples
FS-LOC-GPS field location done with GPS
FS-LOC-MAP field location done with map
SO-POM a Pomeroy orientation device was used
SO-ASC an ASC orientation device was used
SO-MAG orientation with magnetic compass
SO-SUN orientation with sun compass
-loc: location name, default="unknown"
-spc NUM : specify number of characters to designate a specimen, default = 0
-ins INST : specify instsrument name
-a: average replicate measurements
INPUT FORMAT
Input files are horrible mag binary format (who knows why?)
Orientation convention:
[1] Lab arrow azimuth= mag_azimuth; Lab arrow dip=-field_dip
i.e., field_dip is degrees from vertical down - the hade [default]
[2] Lab arrow azimuth = mag_azimuth-90; Lab arrow dip = -field_dip
i.e., mag_azimuth is strike and field_dip is hade
[3] Lab arrow azimuth = mag_azimuth; Lab arrow dip = 90-field_dip
i.e., lab arrow same as field arrow, but field_dip was a hade.
[4] lab azimuth and dip are same as mag_azimuth, field_dip
[5] lab azimuth is same as mag_azimuth,lab arrow dip=field_dip-90
[6] Lab arrow azimuth = mag_azimuth-90; Lab arrow dip = 90-field_dip
[7] all others you will have to either customize your
self or e-mail ltauxe@ucsd.edu for help.
Magnetic declination convention:
Az will use supplied declination to correct azimuth
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
[2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitary length)
[3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name = sample name
[6] site name entered in site_name column in the orient.txt format input file -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
NB: all others you will have to either customize your
self or e-mail ltauxe@ucsd.edu for help.
OUTPUT
output saved in magic_measurements.txt & er_samples.txt formatted files
will overwrite any existing files
"""
#
# initialize variables
#
mag_file = ''
specnum = 0
(ub_file, samp_file, or_con, corr, meas_file) = ('', 'er_samples.txt', '3', '1', 'magic_measurements.txt')
(pos_file, site_file) = ('', 'er_sites.txt')
noave = 1
args = sys.argv
(bed_dip, bed_dip_dir) = ('', '')
(samp_con, Z, average_bedding) = ('2', 1, '0')
meths = 'FS-FD'
(sclass, lithology, _type) = ('', '', '')
(user, inst) = ('', '')
DecCorr = 0.0
location_name = 'unknown'
months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
gmeths = ''
#
#
dir_path = '.'
if command_line:
if '-WD' in args:
ind = args.index('-WD')
dir_path = sys.argv[ind + 1] # depends on [control=['if'], data=['args']]
if '-h' in args:
print(main.__doc__)
return False # depends on [control=['if'], data=[]]
if '-f' in args:
ind = args.index('-f')
mag_file = sys.argv[ind + 1] # depends on [control=['if'], data=['args']]
if '-fpos' in args:
ind = args.index('-fpos')
pos_file = sys.argv[ind + 1] # depends on [control=['if'], data=['args']]
if '-F' in args:
ind = args.index('-F')
meas_file = sys.argv[ind + 1] # depends on [control=['if'], data=['args']]
if '-Fsa' in args:
ind = args.index('-Fsa')
samp_file = sys.argv[ind + 1] # depends on [control=['if'], data=['args']]
if '-Fsi' in args:
ind = args.index('-Fsi')
site_file = sys.argv[ind + 1] # depends on [control=['if'], data=['args']]
if '-ocn' in args:
ind = args.index('-ocn')
or_con = sys.argv[ind + 1] # depends on [control=['if'], data=['args']]
if '-ncn' in args:
ind = args.index('-ncn')
samp_con = sys.argv[ind + 1] # depends on [control=['if'], data=['args']]
if '-mcd' in args:
ind = args.index('-mcd')
gmeths = sys.argv[ind + 1] # depends on [control=['if'], data=['args']]
if '-loc' in args:
ind = args.index('-loc')
location_name = sys.argv[ind + 1] # depends on [control=['if'], data=['args']]
if '-spc' in args:
ind = args.index('-spc')
specnum = int(args[ind + 1]) # depends on [control=['if'], data=['args']]
if '-ins' in args:
ind = args.index('-ins')
inst = args[ind + 1] # depends on [control=['if'], data=['args']]
if '-a' in args:
noave = 0 # depends on [control=['if'], data=[]]
#
ID = False
if '-ID' in args:
ind = args.index('-ID')
ID = args[ind + 1] # depends on [control=['if'], data=['args']] # depends on [control=['if'], data=[]]
#
if not command_line:
dir_path = kwargs.get('dir_path', '.')
mag_file = kwargs.get('mag_file', '')
pos_file = kwargs.get('pos_file', '')
meas_file = kwargs.get('meas_file', 'magic_measurements.txt')
samp_file = kwargs.get('samp_file', 'er_samples.txt')
site_file = kwargs.get('site_file', 'er_sites.txt')
or_con = kwargs.get('or_con', '3')
samp_con = kwargs.get('samp_con', '2')
corr = kwargs.get('corr', '1')
gmeths = kwargs.get('gmeths', '')
location_name = kwargs.get('location_name', '')
specnum = int(kwargs.get('specnum', 0))
inst = kwargs.get('inst', '')
noave = kwargs.get('noave', 1) # default is DO average
ID = kwargs.get('ID', '') # depends on [control=['if'], data=[]]
# format and fix variables acquired from command line args or input with
# **kwargs
if specnum != 0:
specnum = -specnum # depends on [control=['if'], data=['specnum']]
if ID:
input_dir_path = ID # depends on [control=['if'], data=[]]
else:
input_dir_path = dir_path
if samp_con:
if '4' in samp_con:
if '-' not in samp_con:
print('option [4] must be in form 4-Z where Z is an integer')
return (False, 'option [4] must be in form 4-Z where Z is an integer') # depends on [control=['if'], data=[]]
else:
Z = samp_con.split('-')[1]
samp_con = '4' # depends on [control=['if'], data=['samp_con']]
if '7' in samp_con:
if '-' not in samp_con:
print('option [7] must be in form 7-Z where Z is an integer')
return (False, 'option [7] must be in form 7-Z where Z is an integer') # depends on [control=['if'], data=[]]
else:
Z = samp_con.split('-')[1]
samp_con = '7' # depends on [control=['if'], data=['samp_con']]
if '6' in samp_con:
print('Naming convention option [6] not currently supported')
return (False, 'Naming convention option [6] not currently supported')
try:
(Samps, file_type) = pmag.magic_read(os.path.join(input_dir_path, 'er_samples.txt')) # depends on [control=['try'], data=[]]
except:
print("there is no er_samples.txt file in your input directory - you can't use naming convention #6")
return (False, "there is no er_samples.txt file in your input directory - you can't use naming convention #6") # depends on [control=['except'], data=[]]
if file_type == 'bad_file':
print("there is no er_samples.txt file in your input directory - you can't use naming convention #6")
return (False, "there is no er_samples.txt file in your input directory - you can't use naming convention #6") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not mag_file:
print('mag file is required input')
return (False, 'mag file is required input') # depends on [control=['if'], data=[]]
output_dir_path = dir_path
mag_file = os.path.join(input_dir_path, mag_file)
samp_file = output_dir_path + '/' + samp_file
site_file = output_dir_path + '/' + site_file
meas_file = output_dir_path + '/' + meas_file
samplist = []
try:
(Samps, file_type) = pmag.magic_read(samp_file)
for samp in Samps:
if samp['er_sample_name'] not in samplist:
samplist.append(samp['er_sample_name']) # depends on [control=['if'], data=['samplist']] # depends on [control=['for'], data=['samp']] # depends on [control=['try'], data=[]]
except:
Samps = [] # depends on [control=['except'], data=[]]
MagRecs = []
try:
f = open(mag_file, 'br')
input = str(f.read()).strip("b '")
f.close() # depends on [control=['try'], data=[]]
except Exception as ex:
print('ex', ex)
print('bad mag file')
return (False, 'bad mag file') # depends on [control=['except'], data=['ex']]
(firstline, date) = (1, '')
d = input.split('\\xcd')
for line in d:
rec = line.split('\\x00')
if firstline == 1:
firstline = 0
(spec, vol) = ('', 1)
el = 51
while line[el:el + 1] != '\\':
spec = spec + line[el]
el += 1 # depends on [control=['while'], data=[]]
# check for bad sample name
test = spec.split('.')
date = ''
if len(test) > 1:
spec = test[0]
kk = 24
while line[kk] != '\\x01' and line[kk] != '\\x00':
kk += 1 # depends on [control=['while'], data=[]]
vcc = line[24:kk]
el = 10
while rec[el].strip() != '':
el += 1 # depends on [control=['while'], data=[]]
(date, comments) = (rec[el + 7], []) # depends on [control=['if'], data=[]]
else:
el = 9
while rec[el] != '\\x01':
el += 1 # depends on [control=['while'], data=[]]
(vcc, date, comments) = (rec[el - 3], rec[el + 7], [])
specname = spec.lower()
print('importing ', specname)
el += 8
while rec[el].isdigit() == False:
comments.append(rec[el])
el += 1 # depends on [control=['while'], data=[]]
while rec[el] == '':
el += 1 # depends on [control=['while'], data=[]]
az = float(rec[el])
el += 1
while rec[el] == '':
el += 1 # depends on [control=['while'], data=[]]
pl = float(rec[el])
el += 1
while rec[el] == '':
el += 1 # depends on [control=['while'], data=[]]
bed_dip_dir = float(rec[el])
el += 1
while rec[el] == '':
el += 1 # depends on [control=['while'], data=[]]
bed_dip = float(rec[el])
el += 1
while rec[el] == '':
el += 1 # depends on [control=['while'], data=[]]
if rec[el] == '\\x01':
bed_dip = 180.0 - bed_dip
el += 1
while rec[el] == '':
el += 1 # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
fold_az = float(rec[el])
el += 1
while rec[el] == '':
el += 1 # depends on [control=['while'], data=[]]
fold_pl = rec[el]
el += 1
while rec[el] == '':
el += 1 # depends on [control=['while'], data=[]]
if rec[el] != '' and rec[el] != '\\x02' and (rec[el] != '\\x01'):
deccorr = float(rec[el])
az += deccorr
bed_dip_dir += deccorr
fold_az += deccorr
if bed_dip_dir >= 360:
bed_dip_dir = bed_dip_dir - 360.0 # depends on [control=['if'], data=['bed_dip_dir']]
if az >= 360.0:
az = az - 360.0 # depends on [control=['if'], data=['az']]
if fold_az >= 360.0:
fold_az = fold_az - 360.0 # depends on [control=['if'], data=['fold_az']] # depends on [control=['if'], data=[]]
else:
deccorr = 0
if specnum != 0:
sample = specname[:specnum] # depends on [control=['if'], data=['specnum']]
else:
sample = specname
SampRec = {}
SampRec['er_sample_name'] = sample
SampRec['er_location_name'] = location_name
SampRec['er_citation_names'] = 'This study'
# convert to labaz, labpl
(labaz, labdip) = pmag.orient(az, pl, or_con)
#
# parse information common to all orientation methods
#
SampRec['sample_bed_dip'] = '%7.1f' % bed_dip
SampRec['sample_bed_dip_direction'] = '%7.1f' % bed_dip_dir
SampRec['sample_dip'] = '%7.1f' % labdip
SampRec['sample_azimuth'] = '%7.1f' % labaz
if vcc.strip() != '':
vol = float(vcc) * 1e-06 # convert to m^3 from cc # depends on [control=['if'], data=[]]
SampRec['sample_volume'] = '%10.3e' % vol
SampRec['sample_class'] = sclass
SampRec['sample_lithology'] = lithology
SampRec['sample_type'] = _type
SampRec['sample_declination_correction'] = '%7.1f' % deccorr
methods = gmeths.split(':')
if deccorr != '0':
if 'SO-MAG' in methods:
del methods[methods.index('SO-MAG')] # depends on [control=['if'], data=['methods']]
methods.append('SO-CMD-NORTH') # depends on [control=['if'], data=[]]
meths = ''
for meth in methods:
meths = meths + meth + ':' # depends on [control=['for'], data=['meth']]
meths = meths[:-1]
SampRec['magic_method_codes'] = meths
if int(samp_con) < 6 or int(samp_con) == 7:
# parse out the site name
site = pmag.parse_site(SampRec['er_sample_name'], samp_con, Z)
SampRec['er_site_name'] = site # depends on [control=['if'], data=[]]
elif len(Samps) > 1:
(site, location) = ('', '')
for samp in Samps:
if samp['er_sample_name'] == SampRec['er_sample_name']:
site = samp['er_site_name']
location = samp['er_location_name']
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['samp']]
SampRec['er_location_name'] = samp['er_location_name']
SampRec['er_site_name'] = samp['er_site_name'] # depends on [control=['if'], data=[]]
if sample not in samplist:
samplist.append(sample)
Samps.append(SampRec) # depends on [control=['if'], data=['sample', 'samplist']] # depends on [control=['if'], data=['firstline']]
else:
MagRec = {}
MagRec['treatment_temp'] = '%8.3e' % 273 # room temp in kelvin
MagRec['measurement_temp'] = '%8.3e' % 273 # room temp in kelvin
MagRec['treatment_ac_field'] = '0'
MagRec['treatment_dc_field'] = '0'
MagRec['treatment_dc_field_phi'] = '0'
MagRec['treatment_dc_field_theta'] = '0'
meas_type = 'LT-NO'
MagRec['measurement_flag'] = 'g'
MagRec['measurement_standard'] = 'u'
MagRec['measurement_number'] = '1'
MagRec['er_specimen_name'] = specname
MagRec['er_sample_name'] = SampRec['er_sample_name']
MagRec['er_site_name'] = SampRec['er_site_name']
MagRec['er_location_name'] = location_name
(el, demag) = (1, '')
treat = rec[el]
if treat[-1] == 'C':
demag = 'T' # depends on [control=['if'], data=[]]
elif treat != 'NRM':
demag = 'AF' # depends on [control=['if'], data=[]]
el += 1
while rec[el] == '':
el += 1 # depends on [control=['while'], data=[]]
MagRec['measurement_dec'] = rec[el]
cdec = float(rec[el])
el += 1
while rec[el] == '':
el += 1 # depends on [control=['while'], data=[]]
MagRec['measurement_inc'] = rec[el]
cinc = float(rec[el])
el += 1
while rec[el] == '':
el += 1 # depends on [control=['while'], data=[]]
gdec = rec[el]
el += 1
while rec[el] == '':
el += 1 # depends on [control=['while'], data=[]]
ginc = rec[el]
el = skip(2, el, rec) # skip bdec,binc
# el=skip(4,el,rec) # skip gdec,ginc,bdec,binc
# print 'moment emu: ',rec[el]
MagRec['measurement_magn_moment'] = '%10.3e' % (float(rec[el]) * 0.001) # moment in Am^2 (from emu)
MagRec['measurement_magn_volume'] = '%10.3e' % (float(rec[el]) * 0.001 / vol) # magnetization in A/m
el = skip(2, el, rec) # skip to xsig
MagRec['measurement_sd_x'] = '%10.3e' % (float(rec[el]) * 0.001) # convert from emu
el = skip(3, el, rec) # skip to ysig
MagRec['measurement_sd_y'] = '%10.3e' % (float(rec[el]) * 0.001) # convert from emu
el = skip(3, el, rec) # skip to zsig
MagRec['measurement_sd_z'] = '%10.3e' % (float(rec[el]) * 0.001) # convert from emu
el += 1 # skip to positions
MagRec['measurement_positions'] = rec[el]
# el=skip(5,el,rec) # skip to date
# mm=str(months.index(date[0]))
# if len(mm)==1:
# mm='0'+str(mm)
# else:
# mm=str(mm)
# dstring=date[2]+':'+mm+':'+date[1]+":"+date[3]
# MagRec['measurement_date']=dstring
MagRec['magic_instrument_codes'] = inst
MagRec['er_analyst_mail_names'] = ''
MagRec['er_citation_names'] = 'This study'
MagRec['magic_method_codes'] = meas_type
if demag == 'AF':
MagRec['treatment_ac_field'] = '%8.3e' % (float(treat[:-2]) * 0.001) # peak field in tesla
meas_type = 'LT-AF-Z'
MagRec['treatment_dc_field'] = '0' # depends on [control=['if'], data=[]]
elif demag == 'T':
MagRec['treatment_temp'] = '%8.3e' % (float(treat[:-1]) + 273.0) # temp in kelvin
meas_type = 'LT-T-Z' # depends on [control=['if'], data=[]]
MagRec['magic_method_codes'] = meas_type
MagRecs.append(MagRec) # depends on [control=['for'], data=['line']]
MagOuts = pmag.measurements_methods(MagRecs, noave)
(MagOuts, keylist) = pmag.fillkeys(MagOuts)
pmag.magic_write(meas_file, MagOuts, 'magic_measurements')
print('Measurements put in ', meas_file)
(SampsOut, sampkeys) = pmag.fillkeys(Samps)
pmag.magic_write(samp_file, SampsOut, 'er_samples')
Sites = []
for samp in Samps:
SiteRec = {}
SiteRec['er_site_name'] = samp['er_site_name']
SiteRec['er_location_name'] = samp['er_location_name']
SiteRec['site_definition'] = 's'
SiteRec['er_citation_names'] = 'This study'
if 'sample_class' in list(samp.keys()):
SiteRec['site_class'] = samp['sample_class'] # depends on [control=['if'], data=[]]
if 'sample_lithology' in list(samp.keys()):
SiteRec['site_lithology'] = samp['sample_lithology'] # depends on [control=['if'], data=[]]
if 'sample_type' in list(samp.keys()):
SiteRec['site_lithology'] = samp['sample_lithology'] # depends on [control=['if'], data=[]]
if 'sample_lat' in list(samp.keys()):
SiteRec['site_lat'] = samp['sample_lat'] # depends on [control=['if'], data=[]]
else:
SiteRec['site_lat'] = '-999'
if 'sample_lon' in list(samp.keys()):
SiteRec['site_lon'] = samp['sample_lon'] # depends on [control=['if'], data=[]]
else:
SiteRec['site_lon'] = '-999'
if 'sample_height' in list(samp.keys()):
SiteRec['site_height'] = samp['sample_height'] # depends on [control=['if'], data=[]]
Sites.append(SiteRec) # depends on [control=['for'], data=['samp']]
pmag.magic_write(site_file, Sites, 'er_sites')
return (True, meas_file)
|
def ConvCnstrMODOptionsDefaults(method='fista'):
    """Return the defaults dict for the ConvCnstrMOD class selected by
    the ``method`` parameter.

    The defaults of the class resolved via ``ccmod_class_label_lookup``
    are deep-copied (so the class-level dict is never mutated) and then
    overridden with solver-specific settings: back-tracking parameters
    for 'fista', automatic rho adjustment parameters otherwise.
    """
    defaults = copy.deepcopy(ccmod_class_label_lookup(method).Options.defaults)
    if method == 'fista':
        overrides = {'MaxMainIter': 1,
                     'BackTrack': {'gamma_u': 1.2, 'MaxIter': 50}}
    else:
        overrides = {'MaxMainIter': 1,
                     'AutoRho': {'Period': 10, 'AutoScaling': False,
                                 'RsdlRatio': 10.0, 'Scaling': 2.0,
                                 'RsdlTarget': 1.0}}
    defaults.update(overrides)
    return defaults
|
def function[ConvCnstrMODOptionsDefaults, parameter[method]]:
constant[Get defaults dict for the ConvCnstrMOD class specified by the
``method`` parameter.
]
variable[dflt] assign[=] call[name[copy].deepcopy, parameter[call[name[ccmod_class_label_lookup], parameter[name[method]]].Options.defaults]]
if compare[name[method] equal[==] constant[fista]] begin[:]
call[name[dflt].update, parameter[dictionary[[<ast.Constant object at 0x7da1b07f8910>, <ast.Constant object at 0x7da1b07f8e50>], [<ast.Constant object at 0x7da1b07f9120>, <ast.Dict object at 0x7da1b07f8550>]]]]
return[name[dflt]]
|
keyword[def] identifier[ConvCnstrMODOptionsDefaults] ( identifier[method] = literal[string] ):
literal[string]
identifier[dflt] = identifier[copy] . identifier[deepcopy] ( identifier[ccmod_class_label_lookup] ( identifier[method] ). identifier[Options] . identifier[defaults] )
keyword[if] identifier[method] == literal[string] :
identifier[dflt] . identifier[update] ({ literal[string] : literal[int] , literal[string] :
{ literal[string] : literal[int] , literal[string] : literal[int] }})
keyword[else] :
identifier[dflt] . identifier[update] ({ literal[string] : literal[int] , literal[string] :
{ literal[string] : literal[int] , literal[string] : keyword[False] ,
literal[string] : literal[int] , literal[string] : literal[int] ,
literal[string] : literal[int] }})
keyword[return] identifier[dflt]
|
def ConvCnstrMODOptionsDefaults(method='fista'):
"""Get defaults dict for the ConvCnstrMOD class specified by the
``method`` parameter.
"""
dflt = copy.deepcopy(ccmod_class_label_lookup(method).Options.defaults)
if method == 'fista':
dflt.update({'MaxMainIter': 1, 'BackTrack': {'gamma_u': 1.2, 'MaxIter': 50}}) # depends on [control=['if'], data=[]]
else:
dflt.update({'MaxMainIter': 1, 'AutoRho': {'Period': 10, 'AutoScaling': False, 'RsdlRatio': 10.0, 'Scaling': 2.0, 'RsdlTarget': 1.0}})
return dflt
|
def get_allowance(self, asset_name: str, from_address: str, to_address: str, is_full: bool = False) -> str:
    """
    Query the current network for the allowance granted from one account
    to another for the given asset.

    :param asset_name: name of the asset to query.
    :param from_address: a base58 encoded account address (transfer-from).
    :param to_address: a base58 encoded account address (transfer-to).
    :param is_full: if True, return the complete JSON-RPC response rather
        than only its ``result`` field.
    :return: the allowance information.
    """
    rpc_args = [asset_name, from_address, to_address]
    payload = self.generate_json_rpc_payload(RpcMethod.GET_ALLOWANCE, rpc_args)
    response = self.__post(self.__url, payload)
    return response if is_full else response['result']
|
def function[get_allowance, parameter[self, asset_name, from_address, to_address, is_full]]:
constant[
This interface is used to get the the allowance
from transfer-from account to transfer-to account in current network.
:param asset_name:
:param from_address: a base58 encoded account address.
:param to_address: a base58 encoded account address.
:param is_full:
:return: the information of allowance in dictionary form.
]
variable[payload] assign[=] call[name[self].generate_json_rpc_payload, parameter[name[RpcMethod].GET_ALLOWANCE, list[[<ast.Name object at 0x7da20c6c4dc0>, <ast.Name object at 0x7da20c6c4ca0>, <ast.Name object at 0x7da20c6c6c20>]]]]
variable[response] assign[=] call[name[self].__post, parameter[name[self].__url, name[payload]]]
if name[is_full] begin[:]
return[name[response]]
return[call[name[response]][constant[result]]]
|
keyword[def] identifier[get_allowance] ( identifier[self] , identifier[asset_name] : identifier[str] , identifier[from_address] : identifier[str] , identifier[to_address] : identifier[str] , identifier[is_full] : identifier[bool] = keyword[False] )-> identifier[str] :
literal[string]
identifier[payload] = identifier[self] . identifier[generate_json_rpc_payload] ( identifier[RpcMethod] . identifier[GET_ALLOWANCE] ,[ identifier[asset_name] , identifier[from_address] , identifier[to_address] ])
identifier[response] = identifier[self] . identifier[__post] ( identifier[self] . identifier[__url] , identifier[payload] )
keyword[if] identifier[is_full] :
keyword[return] identifier[response]
keyword[return] identifier[response] [ literal[string] ]
|
def get_allowance(self, asset_name: str, from_address: str, to_address: str, is_full: bool=False) -> str:
"""
This interface is used to get the the allowance
from transfer-from account to transfer-to account in current network.
:param asset_name:
:param from_address: a base58 encoded account address.
:param to_address: a base58 encoded account address.
:param is_full:
:return: the information of allowance in dictionary form.
"""
payload = self.generate_json_rpc_payload(RpcMethod.GET_ALLOWANCE, [asset_name, from_address, to_address])
response = self.__post(self.__url, payload)
if is_full:
return response # depends on [control=['if'], data=[]]
return response['result']
|
def _version_checker(module, minver):
"""Checks that module has a higher version that minver.
params:
- module: a module to test
- minver: a tuple of versions
"""
# We could use LooseVersion, but distutils imports imp which is deprecated
version_regexp = r'[a-z]?((?:\d|\.)+\d+)(?:\.dev[0-9]+)?'
version_tags = re.match(version_regexp, module.__version__)
if not version_tags:
return False
version_tags = version_tags.group(1).split(".")
version_tags = tuple(int(x) for x in version_tags)
return version_tags >= minver
|
def function[_version_checker, parameter[module, minver]]:
constant[Checks that module has a higher version that minver.
params:
- module: a module to test
- minver: a tuple of versions
]
variable[version_regexp] assign[=] constant[[a-z]?((?:\d|\.)+\d+)(?:\.dev[0-9]+)?]
variable[version_tags] assign[=] call[name[re].match, parameter[name[version_regexp], name[module].__version__]]
if <ast.UnaryOp object at 0x7da1b21c5420> begin[:]
return[constant[False]]
variable[version_tags] assign[=] call[call[name[version_tags].group, parameter[constant[1]]].split, parameter[constant[.]]]
variable[version_tags] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b21c45b0>]]
return[compare[name[version_tags] greater_or_equal[>=] name[minver]]]
|
keyword[def] identifier[_version_checker] ( identifier[module] , identifier[minver] ):
literal[string]
identifier[version_regexp] = literal[string]
identifier[version_tags] = identifier[re] . identifier[match] ( identifier[version_regexp] , identifier[module] . identifier[__version__] )
keyword[if] keyword[not] identifier[version_tags] :
keyword[return] keyword[False]
identifier[version_tags] = identifier[version_tags] . identifier[group] ( literal[int] ). identifier[split] ( literal[string] )
identifier[version_tags] = identifier[tuple] ( identifier[int] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[version_tags] )
keyword[return] identifier[version_tags] >= identifier[minver]
|
def _version_checker(module, minver):
"""Checks that module has a higher version that minver.
params:
- module: a module to test
- minver: a tuple of versions
"""
# We could use LooseVersion, but distutils imports imp which is deprecated
version_regexp = '[a-z]?((?:\\d|\\.)+\\d+)(?:\\.dev[0-9]+)?'
version_tags = re.match(version_regexp, module.__version__)
if not version_tags:
return False # depends on [control=['if'], data=[]]
version_tags = version_tags.group(1).split('.')
version_tags = tuple((int(x) for x in version_tags))
return version_tags >= minver
|
def delete_role(resource_root, service_name, name, cluster_name="default"):
  """
  Delete the named role from a service.
  @param resource_root: The root Resource object.
  @param service_name: Service name
  @param name: Role name
  @param cluster_name: Cluster name
  @return: The deleted ApiRole object
  """
  role_path = _get_role_path(cluster_name, service_name, name)
  return call(resource_root.delete, role_path, ApiRole)
|
def function[delete_role, parameter[resource_root, service_name, name, cluster_name]]:
constant[
Delete a role by name
@param resource_root: The root Resource object.
@param service_name: Service name
@param name: Role name
@param cluster_name: Cluster name
@return: The deleted ApiRole object
]
return[call[name[call], parameter[name[resource_root].delete, call[name[_get_role_path], parameter[name[cluster_name], name[service_name], name[name]]], name[ApiRole]]]]
|
keyword[def] identifier[delete_role] ( identifier[resource_root] , identifier[service_name] , identifier[name] , identifier[cluster_name] = literal[string] ):
literal[string]
keyword[return] identifier[call] ( identifier[resource_root] . identifier[delete] ,
identifier[_get_role_path] ( identifier[cluster_name] , identifier[service_name] , identifier[name] ), identifier[ApiRole] )
|
def delete_role(resource_root, service_name, name, cluster_name='default'):
"""
Delete a role by name
@param resource_root: The root Resource object.
@param service_name: Service name
@param name: Role name
@param cluster_name: Cluster name
@return: The deleted ApiRole object
"""
return call(resource_root.delete, _get_role_path(cluster_name, service_name, name), ApiRole)
|
def get_all_regions_with_tiles(self):
    """
    Generator yielding (rx, ry) tuples identifying every region for
    which the world has tile data.

    Keys returned by ``get_all_keys`` are packed big-endian as
    (layer: uint8, rx: uint16, ry: uint16); only layer 1 entries
    correspond to tile data.
    """
    for packed in self.get_all_keys():
        layer, region_x, region_y = struct.unpack('>BHH', packed)
        if layer != 1:
            continue
        yield (region_x, region_y)
|
def function[get_all_regions_with_tiles, parameter[self]]:
constant[
Generator which yields a set of (rx, ry) tuples which describe
all regions for which the world has tile data
]
for taget[name[key]] in starred[call[name[self].get_all_keys, parameter[]]] begin[:]
<ast.Tuple object at 0x7da1b0e313f0> assign[=] call[name[struct].unpack, parameter[constant[>BHH], name[key]]]
if compare[name[layer] equal[==] constant[1]] begin[:]
<ast.Yield object at 0x7da1b0e2eb60>
|
keyword[def] identifier[get_all_regions_with_tiles] ( identifier[self] ):
literal[string]
keyword[for] identifier[key] keyword[in] identifier[self] . identifier[get_all_keys] ():
( identifier[layer] , identifier[rx] , identifier[ry] )= identifier[struct] . identifier[unpack] ( literal[string] , identifier[key] )
keyword[if] identifier[layer] == literal[int] :
keyword[yield] ( identifier[rx] , identifier[ry] )
|
def get_all_regions_with_tiles(self):
"""
Generator which yields a set of (rx, ry) tuples which describe
all regions for which the world has tile data
"""
for key in self.get_all_keys():
(layer, rx, ry) = struct.unpack('>BHH', key)
if layer == 1:
yield (rx, ry) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
|
def as_id_dict(self):
	"""
	Return table as a nested dictionary: each experiment_id maps to a
	dict keyed by (time_slide_id, veto_def_name, datatype, sim_proc_id)
	tuples whose values are the corresponding expr_summ_id.

	Raises KeyError if two rows share both the experiment_id and the
	key tuple, since that indicates duplicate table entries.
	"""
	mapping = {}
	for row in self:
		inner = mapping.setdefault(row.experiment_id, {})
		key = (row.time_slide_id, row.veto_def_name, row.datatype, row.sim_proc_id)
		if key in inner:
			# entry already exists, raise error
			raise KeyError("duplicate entries in experiment_summary table")
		inner[key] = row.experiment_summ_id
	return mapping
|
def function[as_id_dict, parameter[self]]:
constant[
Return table as a dictionary mapping experiment_id, time_slide_id,
veto_def_name, and sim_proc_id (if it exists) to the expr_summ_id.
]
variable[d] assign[=] dictionary[[], []]
for taget[name[row]] in starred[name[self]] begin[:]
if compare[name[row].experiment_id <ast.NotIn object at 0x7da2590d7190> name[d]] begin[:]
call[name[d]][name[row].experiment_id] assign[=] dictionary[[], []]
if compare[tuple[[<ast.Attribute object at 0x7da1b0b70c70>, <ast.Attribute object at 0x7da1b0b72c50>, <ast.Attribute object at 0x7da1b0b71330>, <ast.Attribute object at 0x7da1b0b72740>]] in call[name[d]][name[row].experiment_id]] begin[:]
<ast.Raise object at 0x7da1b0b72320>
call[call[name[d]][name[row].experiment_id]][tuple[[<ast.Attribute object at 0x7da1b0b72500>, <ast.Attribute object at 0x7da1b0b71c30>, <ast.Attribute object at 0x7da1b0b70d30>, <ast.Attribute object at 0x7da1b0b73820>]]] assign[=] name[row].experiment_summ_id
return[name[d]]
|
keyword[def] identifier[as_id_dict] ( identifier[self] ):
literal[string]
identifier[d] ={}
keyword[for] identifier[row] keyword[in] identifier[self] :
keyword[if] identifier[row] . identifier[experiment_id] keyword[not] keyword[in] identifier[d] :
identifier[d] [ identifier[row] . identifier[experiment_id] ]={}
keyword[if] ( identifier[row] . identifier[time_slide_id] , identifier[row] . identifier[veto_def_name] , identifier[row] . identifier[datatype] , identifier[row] . identifier[sim_proc_id] ) keyword[in] identifier[d] [ identifier[row] . identifier[experiment_id] ]:
keyword[raise] identifier[KeyError] ( literal[string] )
identifier[d] [ identifier[row] . identifier[experiment_id] ][( identifier[row] . identifier[time_slide_id] , identifier[row] . identifier[veto_def_name] , identifier[row] . identifier[datatype] , identifier[row] . identifier[sim_proc_id] )]= identifier[row] . identifier[experiment_summ_id]
keyword[return] identifier[d]
|
def as_id_dict(self):
"""
Return table as a dictionary mapping experiment_id, time_slide_id,
veto_def_name, and sim_proc_id (if it exists) to the expr_summ_id.
"""
d = {}
for row in self:
if row.experiment_id not in d:
d[row.experiment_id] = {} # depends on [control=['if'], data=['d']]
if (row.time_slide_id, row.veto_def_name, row.datatype, row.sim_proc_id) in d[row.experiment_id]: # entry already exists, raise error
raise KeyError('duplicate entries in experiment_summary table') # depends on [control=['if'], data=[]]
d[row.experiment_id][row.time_slide_id, row.veto_def_name, row.datatype, row.sim_proc_id] = row.experiment_summ_id # depends on [control=['for'], data=['row']]
return d
|
def print_duplicate_anchor_information(duplicate_tags):
    """
    Prints information about duplicate AnchorHub tags found during collection.
    :param duplicate_tags: Dictionary mapping string file path keys to a list of
    tuples. The tuples contain the following information, in order:
        1. The string AnchorHub tag that was repeated
        2. The line in the file that the duplicate was found, as a number
        3. The string generated anchor that first used the repeated tag
    """
    print("Duplicate anchors specified within file(s)")
    print("Please modify your code to remove duplicates.\r\n")
    for path, entries in duplicate_tags.items():
        print("File: " + path)
        for tag, line_number, anchor in entries:
            # One tab-indented line per duplicate: its line number, the
            # repeated tag, and the anchor that first claimed that tag.
            print("\tLine " + str(line_number) + "\t#" + tag +
                  " :\t" + anchor)
|
def function[print_duplicate_anchor_information, parameter[duplicate_tags]]:
constant[
Prints information about duplicate AnchorHub tags found during collection.
:param duplicate_tags: Dictionary mapping string file path keys to a list of
tuples. The tuples contain the following information, in order:
1. The string AnchorHub tag that was repeated
2. The line in the file that the duplicate was found, as a number
3. The string generated anchor that first used the repeated tag
]
call[name[print], parameter[constant[Duplicate anchors specified within file(s)]]]
call[name[print], parameter[constant[Please modify your code to remove duplicates.
]]]
for taget[name[file_path]] in starred[name[duplicate_tags]] begin[:]
call[name[print], parameter[binary_operation[constant[File: ] + name[file_path]]]]
for taget[name[line_info]] in starred[call[name[duplicate_tags]][name[file_path]]] begin[:]
call[name[print], parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[ Line ] + call[name[str], parameter[call[name[line_info]][constant[1]]]]] + constant[ #]] + call[name[line_info]][constant[0]]] + constant[ : ]] + call[name[line_info]][constant[2]]]]]
|
keyword[def] identifier[print_duplicate_anchor_information] ( identifier[duplicate_tags] ):
literal[string]
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
keyword[for] identifier[file_path] keyword[in] identifier[duplicate_tags] :
identifier[print] ( literal[string] + identifier[file_path] )
keyword[for] identifier[line_info] keyword[in] identifier[duplicate_tags] [ identifier[file_path] ]:
identifier[print] ( literal[string] + identifier[str] ( identifier[line_info] [ literal[int] ])+
literal[string] + identifier[line_info] [ literal[int] ]+
literal[string] + identifier[line_info] [ literal[int] ])
|
def print_duplicate_anchor_information(duplicate_tags):
    """
    Prints information about duplicate AnchorHub tags found during collection.
    :param duplicate_tags: Dictionary mapping string file path keys to a list of
    tuples. The tuples contain the following information, in order:
        1. The string AnchorHub tag that was repeated
        2. The line in the file that the duplicate was found, as a number
        3. The string generated anchor that first used the repeated tag
    """
    print('Duplicate anchors specified within file(s)')
    print('Please modify your code to remove duplicates.\r\n')
    for file_path in duplicate_tags:
        print('File: ' + file_path)
        for line_info in duplicate_tags[file_path]:
            # line_info is (tag, line number, generated anchor); print the
            # line number, the repeated tag, then the anchor that first
            # used that tag.
            print('\tLine ' + str(line_info[1]) + '\t#' + line_info[0] + ' :\t' + line_info[2])
|
def diagnostics_plot_chisq(self, ds, figname="modelfit_chisqs.png"):
    """Save a diagnostic histogram of the per-star chi-squared values.

    Sums ``self.chisqs`` over pixels for each star, plots the resulting
    distribution with a vertical line at the per-star degrees of freedom,
    and writes the figure to ``figname``.

    Parameters
    ----------
    ds:
        Dataset providing the wavelength grid (``ds.wl``).
    figname: str, optional
        Filename of the saved output plot.
    """
    # Dead-code cleanup: the original also fetched ds.get_plotting_labels(),
    # self.pivots, self.scatters and derived npixels/nlabels, none of which
    # were used anywhere in this function.
    lams = ds.wl
    chisqs = self.chisqs
    coeffs = self.coeffs
    # Histogram of the chi squareds of individual stars (summed over pixels).
    plt.hist(np.sum(chisqs, axis=0), color='lightblue', alpha=0.7,
             bins=int(np.sqrt(len(chisqs))))
    # Degrees of freedom for one star: pixels minus model coefficients.
    dof = len(lams) - coeffs.shape[1]
    plt.axvline(x=dof, c='k', linewidth=2, label="DOF")
    plt.legend()
    plt.title("Distribution of " + r"$\chi^2$" + " of the Model Fit")
    plt.ylabel("Count")
    plt.xlabel(r"$\chi^2$" + " of Individual Star")
    print("Diagnostic plot: histogram of the red chi squareds of the fit")
    print("Saved as %s" % figname)
    plt.savefig(figname)
    plt.close()
|
def function[diagnostics_plot_chisq, parameter[self, ds, figname]]:
constant[ Produce a set of diagnostic plots for the model
Parameters
----------
(optional) chisq_dist_plot_name: str
Filename of output saved plot
]
variable[label_names] assign[=] call[name[ds].get_plotting_labels, parameter[]]
variable[lams] assign[=] name[ds].wl
variable[pivots] assign[=] name[self].pivots
variable[npixels] assign[=] call[name[len], parameter[name[lams]]]
variable[nlabels] assign[=] call[name[len], parameter[name[pivots]]]
variable[chisqs] assign[=] name[self].chisqs
variable[coeffs] assign[=] name[self].coeffs
variable[scatters] assign[=] name[self].scatters
call[name[plt].hist, parameter[call[name[np].sum, parameter[name[chisqs]]]]]
variable[dof] assign[=] binary_operation[call[name[len], parameter[name[lams]]] - call[name[coeffs].shape][constant[1]]]
call[name[plt].axvline, parameter[]]
call[name[plt].legend, parameter[]]
call[name[plt].title, parameter[binary_operation[binary_operation[constant[Distribution of ] + constant[$\chi^2$]] + constant[ of the Model Fit]]]]
call[name[plt].ylabel, parameter[constant[Count]]]
call[name[plt].xlabel, parameter[binary_operation[constant[$\chi^2$] + constant[ of Individual Star]]]]
call[name[print], parameter[constant[Diagnostic plot: histogram of the red chi squareds of the fit]]]
call[name[print], parameter[binary_operation[constant[Saved as %s] <ast.Mod object at 0x7da2590d6920> name[figname]]]]
call[name[plt].savefig, parameter[name[figname]]]
call[name[plt].close, parameter[]]
|
keyword[def] identifier[diagnostics_plot_chisq] ( identifier[self] , identifier[ds] , identifier[figname] = literal[string] ):
literal[string]
identifier[label_names] = identifier[ds] . identifier[get_plotting_labels] ()
identifier[lams] = identifier[ds] . identifier[wl]
identifier[pivots] = identifier[self] . identifier[pivots]
identifier[npixels] = identifier[len] ( identifier[lams] )
identifier[nlabels] = identifier[len] ( identifier[pivots] )
identifier[chisqs] = identifier[self] . identifier[chisqs]
identifier[coeffs] = identifier[self] . identifier[coeffs]
identifier[scatters] = identifier[self] . identifier[scatters]
identifier[plt] . identifier[hist] ( identifier[np] . identifier[sum] ( identifier[chisqs] , identifier[axis] = literal[int] ), identifier[color] = literal[string] , identifier[alpha] = literal[int] ,
identifier[bins] = identifier[int] ( identifier[np] . identifier[sqrt] ( identifier[len] ( identifier[chisqs] ))))
identifier[dof] = identifier[len] ( identifier[lams] )- identifier[coeffs] . identifier[shape] [ literal[int] ]
identifier[plt] . identifier[axvline] ( identifier[x] = identifier[dof] , identifier[c] = literal[string] , identifier[linewidth] = literal[int] , identifier[label] = literal[string] )
identifier[plt] . identifier[legend] ()
identifier[plt] . identifier[title] ( literal[string] + literal[string] + literal[string] )
identifier[plt] . identifier[ylabel] ( literal[string] )
identifier[plt] . identifier[xlabel] ( literal[string] + literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] % identifier[figname] )
identifier[plt] . identifier[savefig] ( identifier[figname] )
identifier[plt] . identifier[close] ()
|
def diagnostics_plot_chisq(self, ds, figname='modelfit_chisqs.png'):
    """Save a diagnostic histogram of the per-star chi-squared values.

    Parameters
    ----------
    ds:
        Dataset providing the wavelength grid (``ds.wl``) and plotting
        labels.
    figname: str, optional
        Filename of the saved output plot.
    """
    label_names = ds.get_plotting_labels()  # NOTE(review): unused below
    lams = ds.wl
    pivots = self.pivots
    npixels = len(lams)  # NOTE(review): unused below
    nlabels = len(pivots)  # NOTE(review): unused below
    chisqs = self.chisqs
    coeffs = self.coeffs
    scatters = self.scatters  # NOTE(review): unused below
    # Histogram of the chi squareds of individual stars (summed over pixels).
    plt.hist(np.sum(chisqs, axis=0), color='lightblue', alpha=0.7, bins=int(np.sqrt(len(chisqs))))
    # Degrees of freedom for one star: pixels minus model coefficients.
    dof = len(lams) - coeffs.shape[1]
    plt.axvline(x=dof, c='k', linewidth=2, label='DOF')
    plt.legend()
    plt.title('Distribution of ' + '$\\chi^2$' + ' of the Model Fit')
    plt.ylabel('Count')
    plt.xlabel('$\\chi^2$' + ' of Individual Star')
    print('Diagnostic plot: histogram of the red chi squareds of the fit')
    print('Saved as %s' % figname)
    plt.savefig(figname)
    plt.close()
|
def thermal_state(omega_level, T, return_diagonal=False):
    r"""Return a thermal state for a given set of levels.
    INPUT:
    - ``omega_level`` - The angular frequencies of each state.
    - ``T`` - The temperature of the ensemble (in Kelvin).
    - ``return_diagonal`` - Whether to return only the populations.
    >>> ground = State("Rb", 85, 5, 0, 1/Integer(2))
    >>> magnetic_states = make_list_of_states([ground], "magnetic")
    >>> omega_level = [ei.omega for ei in magnetic_states]
    >>> T = 273.15 + 20
    >>> print(thermal_state(omega_level, T, return_diagonal=True))
    [0.0834 0.0834 0.0834 0.0834 0.0834 0.0833 0.0833 0.0833 0.0833 0.0833
     0.0833 0.0833]
    """
    # Level energies E_i = hbar * omega_i, vectorized (the previous code
    # built this with a per-element Python list comprehension).
    E = hbar * np.asarray(omega_level, dtype=float)
    # Boltzmann factors, normalised so the populations sum to one.
    p = np.exp(-E / k_B / T)
    p = p / p.sum()
    if not return_diagonal:
        # Full (diagonal) density matrix with the thermal populations.
        return np.diag(p)
    return p
|
def function[thermal_state, parameter[omega_level, T, return_diagonal]]:
constant[Return a thermal state for a given set of levels.
INPUT:
- ``omega_level`` - The angular frequencies of each state.
- ``T`` - The temperature of the ensemble (in Kelvin).
- ``return_diagonal`` - Whether to return only the populations.
>>> ground = State("Rb", 85, 5, 0, 1/Integer(2))
>>> magnetic_states = make_list_of_states([ground], "magnetic")
>>> omega_level = [ei.omega for ei in magnetic_states]
>>> T = 273.15 + 20
>>> print(thermal_state(omega_level, T, return_diagonal=True))
[0.0834 0.0834 0.0834 0.0834 0.0834 0.0833 0.0833 0.0833 0.0833 0.0833
0.0833 0.0833]
]
variable[Ne] assign[=] call[name[len], parameter[name[omega_level]]]
variable[E] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b1956cb0>]]
variable[p] assign[=] call[name[np].exp, parameter[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b1956980> / name[k_B]] / name[T]]]]
variable[p] assign[=] binary_operation[name[p] / call[name[sum], parameter[name[p]]]]
if <ast.UnaryOp object at 0x7da1b1970550> begin[:]
return[call[name[np].diag, parameter[name[p]]]]
return[name[p]]
|
keyword[def] identifier[thermal_state] ( identifier[omega_level] , identifier[T] , identifier[return_diagonal] = keyword[False] ):
literal[string]
identifier[Ne] = identifier[len] ( identifier[omega_level] )
identifier[E] = identifier[np] . identifier[array] ([ identifier[hbar] * identifier[omega_level] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[Ne] )])
identifier[p] = identifier[np] . identifier[exp] (- identifier[E] / identifier[k_B] / identifier[T] )
identifier[p] = identifier[p] / identifier[sum] ( identifier[p] )
keyword[if] keyword[not] identifier[return_diagonal] :
keyword[return] identifier[np] . identifier[diag] ( identifier[p] )
keyword[return] identifier[p]
|
def thermal_state(omega_level, T, return_diagonal=False):
    """Return a thermal state for a given set of levels.
    INPUT:
    - ``omega_level`` - The angular frequencies of each state.
    - ``T`` - The temperature of the ensemble (in Kelvin).
    - ``return_diagonal`` - Whether to return only the populations.
    >>> ground = State("Rb", 85, 5, 0, 1/Integer(2))
    >>> magnetic_states = make_list_of_states([ground], "magnetic")
    >>> omega_level = [ei.omega for ei in magnetic_states]
    >>> T = 273.15 + 20
    >>> print(thermal_state(omega_level, T, return_diagonal=True))
    [0.0834 0.0834 0.0834 0.0834 0.0834 0.0833 0.0833 0.0833 0.0833 0.0833
     0.0833 0.0833]
    """
    Ne = len(omega_level)
    # Level energies E_i = hbar * omega_i.
    E = np.array([hbar * omega_level[i] for i in range(Ne)])
    # Boltzmann factors exp(-E_i / (k_B T)), normalised to unit population.
    p = np.exp(-E / k_B / T)
    p = p / sum(p)
    if not return_diagonal:
        # Full (diagonal) density matrix with the thermal populations.
        return np.diag(p)
    return p
|
def rank_items(self, userid, user_items, selected_items, recalculate_user=False):
    """ Rank given items for a user and returns sorted item list """
    # Reject item ids that fall outside the model's item range.
    if max(selected_items) >= user_items.shape[1] or min(selected_items) < 0:
        raise IndexError("Some of selected itemids are not in the model")

    # Relevance scores: the user's liked vector projected through the
    # item-item similarity matrix.
    scores = user_items[userid].dot(self.similarity)

    # Keep only the requested items, ordered by descending score.
    ranked = sorted(zip(scores.indices, scores.data),
                    key=lambda pair: -pair[1])
    result = [pair for pair in ranked if pair[0] in selected_items]

    # Requested items the model produced no score for are appended with a
    # sentinel score of -1.0 so the output covers every requested item.
    for item in selected_items:
        if item not in scores.indices:
            result.append((item, -1.0))
    return result
|
def function[rank_items, parameter[self, userid, user_items, selected_items, recalculate_user]]:
constant[ Rank given items for a user and returns sorted item list ]
if <ast.BoolOp object at 0x7da2047e9060> begin[:]
<ast.Raise object at 0x7da2047e8340>
variable[liked_vector] assign[=] call[name[user_items]][name[userid]]
variable[recommendations] assign[=] call[name[liked_vector].dot, parameter[name[self].similarity]]
variable[best] assign[=] call[name[sorted], parameter[call[name[zip], parameter[name[recommendations].indices, name[recommendations].data]]]]
variable[ret] assign[=] <ast.ListComp object at 0x7da1b2345cc0>
for taget[name[itemid]] in starred[name[selected_items]] begin[:]
if compare[name[itemid] <ast.NotIn object at 0x7da2590d7190> name[recommendations].indices] begin[:]
call[name[ret].append, parameter[tuple[[<ast.Name object at 0x7da1b2346ec0>, <ast.UnaryOp object at 0x7da1b2345ed0>]]]]
return[name[ret]]
|
keyword[def] identifier[rank_items] ( identifier[self] , identifier[userid] , identifier[user_items] , identifier[selected_items] , identifier[recalculate_user] = keyword[False] ):
literal[string]
keyword[if] identifier[max] ( identifier[selected_items] )>= identifier[user_items] . identifier[shape] [ literal[int] ] keyword[or] identifier[min] ( identifier[selected_items] )< literal[int] :
keyword[raise] identifier[IndexError] ( literal[string] )
identifier[liked_vector] = identifier[user_items] [ identifier[userid] ]
identifier[recommendations] = identifier[liked_vector] . identifier[dot] ( identifier[self] . identifier[similarity] )
identifier[best] = identifier[sorted] ( identifier[zip] ( identifier[recommendations] . identifier[indices] , identifier[recommendations] . identifier[data] ), identifier[key] = keyword[lambda] identifier[x] :- identifier[x] [ literal[int] ])
identifier[ret] =[ identifier[rec] keyword[for] identifier[rec] keyword[in] identifier[best] keyword[if] identifier[rec] [ literal[int] ] keyword[in] identifier[selected_items] ]
keyword[for] identifier[itemid] keyword[in] identifier[selected_items] :
keyword[if] identifier[itemid] keyword[not] keyword[in] identifier[recommendations] . identifier[indices] :
identifier[ret] . identifier[append] (( identifier[itemid] ,- literal[int] ))
keyword[return] identifier[ret]
|
def rank_items(self, userid, user_items, selected_items, recalculate_user=False):
    """Rank the given items for a user and return a sorted item list.

    Returns (itemid, score) pairs sorted by descending relevance; requested
    items the model produced no score for are appended with a score of -1.0.
    """
    # check if selected_items contains itemids that are not in the model(user_items)
    if max(selected_items) >= user_items.shape[1] or min(selected_items) < 0:
        raise IndexError('Some of selected itemids are not in the model')
    # calculate the relevance scores: user's liked vector times the
    # item-item similarity matrix
    liked_vector = user_items[userid]
    recommendations = liked_vector.dot(self.similarity)
    # remove items that are not in the selected_items
    best = sorted(zip(recommendations.indices, recommendations.data), key=lambda x: -x[1])
    ret = [rec for rec in best if rec[0] in selected_items]
    # returned items should be equal to input selected items
    for itemid in selected_items:
        if itemid not in recommendations.indices:
            ret.append((itemid, -1.0))
    return ret
|
def from_info(cls, container, info_obj):
    """Create from subdirectory or file info object.

    Dispatches on the presence of a 'subdir' key in *info_obj*.
    """
    if 'subdir' in info_obj:
        return cls.from_subdir(container, info_obj)
    return cls.from_file_info(container, info_obj)
|
def function[from_info, parameter[cls, container, info_obj]]:
constant[Create from subdirectory or file info object.]
variable[create_fn] assign[=] <ast.IfExp object at 0x7da18f813310>
return[call[name[create_fn], parameter[name[container], name[info_obj]]]]
|
keyword[def] identifier[from_info] ( identifier[cls] , identifier[container] , identifier[info_obj] ):
literal[string]
identifier[create_fn] = identifier[cls] . identifier[from_subdir] keyword[if] literal[string] keyword[in] identifier[info_obj] keyword[else] identifier[cls] . identifier[from_file_info]
keyword[return] identifier[create_fn] ( identifier[container] , identifier[info_obj] )
|
def from_info(cls, container, info_obj):
    """Create from subdirectory or file info object.

    Entries carrying a 'subdir' key are built via ``cls.from_subdir``;
    everything else goes through ``cls.from_file_info``.
    """
    create_fn = cls.from_subdir if 'subdir' in info_obj else cls.from_file_info
    return create_fn(container, info_obj)
|
def stopService(self):
    """
    Stop the writer thread, wait for it to finish.

    Returns a Deferred that fires on the main reactor once the writer
    thread has been joined.
    """
    Service.stopService(self)
    # Detach this destination first so no further log writes are queued
    # while the writer shuts down.
    removeDestination(self)
    # Ask the writer's private reactor to stop, from its own thread.
    self._reactor.callFromThread(self._reactor.stop)
    # Join the writer thread in the main reactor's thread pool so the
    # caller is not blocked while the thread winds down.
    return deferToThreadPool(
        self._mainReactor,
        self._mainReactor.getThreadPool(), self._thread.join)
|
def function[stopService, parameter[self]]:
constant[
Stop the writer thread, wait for it to finish.
]
call[name[Service].stopService, parameter[name[self]]]
call[name[removeDestination], parameter[name[self]]]
call[name[self]._reactor.callFromThread, parameter[name[self]._reactor.stop]]
return[call[name[deferToThreadPool], parameter[name[self]._mainReactor, call[name[self]._mainReactor.getThreadPool, parameter[]], name[self]._thread.join]]]
|
keyword[def] identifier[stopService] ( identifier[self] ):
literal[string]
identifier[Service] . identifier[stopService] ( identifier[self] )
identifier[removeDestination] ( identifier[self] )
identifier[self] . identifier[_reactor] . identifier[callFromThread] ( identifier[self] . identifier[_reactor] . identifier[stop] )
keyword[return] identifier[deferToThreadPool] (
identifier[self] . identifier[_mainReactor] ,
identifier[self] . identifier[_mainReactor] . identifier[getThreadPool] (), identifier[self] . identifier[_thread] . identifier[join] )
|
def stopService(self):
    """
    Stop the writer thread, wait for it to finish.
    """
    Service.stopService(self)
    # Stop accepting log messages before shutting the writer down.
    removeDestination(self)
    # Schedule the writer reactor's shutdown on the writer's own thread.
    self._reactor.callFromThread(self._reactor.stop)
    # Join the writer thread off the main thread via the main reactor's
    # thread pool; the returned Deferred fires when the join completes.
    return deferToThreadPool(self._mainReactor, self._mainReactor.getThreadPool(), self._thread.join)
|
def get_string(dev, index, langid=None):
    r"""Retrieve a string descriptor from the device.
    dev is the Device object which the string will be read from.
    index is the string descriptor index and langid is the Language
    ID of the descriptor. If langid is omitted, the string descriptor
    of the first Language ID will be returned.
    Zero is never the index of a real string. The USB spec allows a device to
    use zero in a string index field to indicate that no string is provided.
    So the caller does not have to treat that case specially, this function
    returns None if passed an index of zero, and generates no traffic
    to the device.
    The return value is the unicode string present in the descriptor, or None
    if the requested index was zero.
    It is a ValueError to request a real string (index not zero), if: the
    device's langid tuple is empty, or with an explicit langid the device does
    not support.
    """
    # Index zero means "no string provided"; answer without touching the bus.
    if index == 0:
        return None

    from usb.control import get_descriptor

    langids = dev.langids
    if len(langids) == 0:
        raise ValueError("The device has no langid")
    if langid is None:
        # Default to the first language the device reports.
        langid = langids[0]
    elif langid not in langids:
        raise ValueError("The device does not support the specified langid")

    # 255 is the maximum size of a string descriptor.
    buf = get_descriptor(dev, 255, DESC_TYPE_STRING, index, langid)

    # buf[0] holds the descriptor's total length; the payload after the
    # 2-byte header is UTF-16-LE encoded. Python >= 3.2 (0x03020000)
    # spells array-to-bytes as tobytes() instead of tostring().
    payload = buf[2:buf[0]]
    if hexversion >= 0x03020000:
        return payload.tobytes().decode('utf-16-le')
    return payload.tostring().decode('utf-16-le')
|
def function[get_string, parameter[dev, index, langid]]:
constant[Retrieve a string descriptor from the device.
dev is the Device object which the string will be read from.
index is the string descriptor index and langid is the Language
ID of the descriptor. If langid is omitted, the string descriptor
of the first Language ID will be returned.
Zero is never the index of a real string. The USB spec allows a device to
use zero in a string index field to indicate that no string is provided.
So the caller does not have to treat that case specially, this function
returns None if passed an index of zero, and generates no traffic
to the device.
The return value is the unicode string present in the descriptor, or None
if the requested index was zero.
It is a ValueError to request a real string (index not zero), if: the
device's langid tuple is empty, or with an explicit langid the device does
not support.
]
if compare[constant[0] equal[==] name[index]] begin[:]
return[constant[None]]
from relative_module[usb.control] import module[get_descriptor]
variable[langids] assign[=] name[dev].langids
if compare[constant[0] equal[==] call[name[len], parameter[name[langids]]]] begin[:]
<ast.Raise object at 0x7da18ede6a40>
if compare[name[langid] is constant[None]] begin[:]
variable[langid] assign[=] call[name[langids]][constant[0]]
variable[buf] assign[=] call[name[get_descriptor], parameter[name[dev], constant[255], name[DESC_TYPE_STRING], name[index], name[langid]]]
if compare[name[hexversion] greater_or_equal[>=] constant[50462720]] begin[:]
return[call[call[call[name[buf]][<ast.Slice object at 0x7da18fe93fa0>].tobytes, parameter[]].decode, parameter[constant[utf-16-le]]]]
|
keyword[def] identifier[get_string] ( identifier[dev] , identifier[index] , identifier[langid] = keyword[None] ):
literal[string]
keyword[if] literal[int] == identifier[index] :
keyword[return] keyword[None]
keyword[from] identifier[usb] . identifier[control] keyword[import] identifier[get_descriptor]
identifier[langids] = identifier[dev] . identifier[langids]
keyword[if] literal[int] == identifier[len] ( identifier[langids] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[langid] keyword[is] keyword[None] :
identifier[langid] = identifier[langids] [ literal[int] ]
keyword[elif] identifier[langid] keyword[not] keyword[in] identifier[langids] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[buf] = identifier[get_descriptor] (
identifier[dev] ,
literal[int] ,
identifier[DESC_TYPE_STRING] ,
identifier[index] ,
identifier[langid]
)
keyword[if] identifier[hexversion] >= literal[int] :
keyword[return] identifier[buf] [ literal[int] : identifier[buf] [ literal[int] ]]. identifier[tobytes] (). identifier[decode] ( literal[string] )
keyword[else] :
keyword[return] identifier[buf] [ literal[int] : identifier[buf] [ literal[int] ]]. identifier[tostring] (). identifier[decode] ( literal[string] )
|
def get_string(dev, index, langid=None):
    """Retrieve a string descriptor from the device.
    dev is the Device object which the string will be read from.
    index is the string descriptor index and langid is the Language
    ID of the descriptor. If langid is omitted, the string descriptor
    of the first Language ID will be returned.
    Zero is never the index of a real string. The USB spec allows a device to
    use zero in a string index field to indicate that no string is provided.
    So the caller does not have to treat that case specially, this function
    returns None if passed an index of zero, and generates no traffic
    to the device.
    The return value is the unicode string present in the descriptor, or None
    if the requested index was zero.
    It is a ValueError to request a real string (index not zero), if: the
    device's langid tuple is empty, or with an explicit langid the device does
    not support.
    """
    if 0 == index:
        return None
    from usb.control import get_descriptor
    langids = dev.langids
    if 0 == len(langids):
        raise ValueError('The device has no langid')
    if langid is None:
        # Default to the first language the device reports.
        langid = langids[0]
    elif langid not in langids:
        raise ValueError('The device does not support the specified langid')
    # 255 is the maximum size of a string descriptor.
    buf = get_descriptor(dev, 255, DESC_TYPE_STRING, index, langid)
    # buf[0] is the descriptor's total length; the payload after the 2-byte
    # header is UTF-16-LE. 50462720 == 0x03020000 (Python 3.2), where
    # array-to-bytes became tobytes() instead of tostring().
    if hexversion >= 50462720:
        return buf[2:buf[0]].tobytes().decode('utf-16-le')
    else:
        return buf[2:buf[0]].tostring().decode('utf-16-le')
|
def startup(self):
    """Launch an EC2 instance and start a selenium node on it.

    Skipped entirely (returning True) when the browser config's
    ``launch`` flag is falsy. Otherwise: runs the configured AMI, waits
    until the instance reports ``running`` (and optionally until the
    EC2 system/instance status checks pass), records the instance
    addresses on ``self``, then starts the selenium server over SSH
    (linux) or as a local process (windows).

    Returns:
        bool: True on success (or when launch was skipped).

    Raises:
        Exception: on invalid configuration, reservation failure, or
            when the instance never becomes healthy.
    """
    import boto.ec2
    if not self.browser_config.get('launch'):
        self.warning_log("Skipping launch")
        return True
    self.info_log("Starting up")
    instance = None
    try:
        # KEY NAME: keep only the file name of the ssh key, minus the
        # four-character ".pem" extension.
        key_name = self.browser_config.get(
            "ssh_key_path"
        ).split(os.sep)[-1][:-4]
        # SECURITY GROUP: accept a single id or a list of ids.
        if type(self.browser_config.get("security_group_ids")) == str:
            security_group_ids = [
                self.browser_config.get("security_group_ids")
            ]
        elif type(self.browser_config.get("security_group_ids")) == list:
            security_group_ids = self.browser_config.get(
                "security_group_ids"
            )
        else:
            msg = "The config security_group_ids must be a string or a list of string"  # noqa
            # BUG FIX: was `self.critial_log(msg)` (typo), which raised
            # AttributeError instead of logging the error.
            self.critical_log(msg)
            raise Exception(msg)
        # LAUNCH INSTANCE
        ec2 = boto.ec2.connect_to_region(self.browser_config.get("region"))
        reservation = ec2.run_instances(
            self.browser_config.get('amiid'),
            key_name=key_name,
            instance_type=self.browser_config.get("instance_type"),
            security_group_ids=security_group_ids
        )
        wait_after_instance_launched = BROME_CONFIG['ec2']['wait_after_instance_launched']  # noqa
        if wait_after_instance_launched:
            self.info_log(
                "Waiting after instance launched: %s seconds..." %
                wait_after_instance_launched
            )
            sleep(wait_after_instance_launched)
        else:
            self.warning_log("Skipping waiting after instance launched")
        try:
            instance = reservation.instances[0]
        except Exception as e:
            self.critical_log(
                'Instance reservation exception: %s' % str(e)
            )
            raise
        self.instance_id = instance.id
        self.info_log('Waiting for the instance to start...')
        # Poll for up to 5 minutes until the instance reports 'running'.
        for i in range(60*5):
            try:
                status = instance.update()
                if status == 'running':
                    break
                else:
                    sleep(1)
            except Exception as e:
                self.error_log(
                    'Exception while wait pending: %s' % str(e)
                )
                sleep(1)
        # Wait until instance is running
        status = instance.update()
        if status == 'running':
            instance.add_tag(
                "Name",
                "%s-selenium-node-%s-%s" %
                (
                    self.browser_config.get('platform'),
                    self.browser_config.get('browserName'),
                    self.index
                )
            )
            self.info_log(
                "New instance (%s) public ip (%s) private ip (%s)" % (
                    instance.id,
                    instance.ip_address,
                    instance.private_ip_address
                )
            )
        else:
            msg = "Instance status is %s and should be (running)" % status
            self.error_log(msg)
            raise Exception(msg)
        if BROME_CONFIG['ec2']['wait_until_system_and_instance_check_performed']:  # noqa
            check_successful = False
            # Poll for up to 5 minutes until both the system and the
            # instance status checks report 'ok'; log progress once a
            # minute.
            for i in range(5*60):
                try:
                    if not i % 60:
                        if not type(status) == str:
                            self.info_log(
                                'System_status: %s, instance_status: %s' %
                                (
                                    status.system_status,
                                    status.instance_status
                                )
                            )
                    status = ec2.get_all_instance_status(
                        instance_ids=[instance.id]
                    )[0]
                    if status.system_status.status == u'ok' and status.instance_status.status == u'ok':  # noqa
                        self.info_log('system_status: %s, instance_status: %s' % (status.system_status, status.instance_status))  # noqa
                        check_successful = True
                        break
                except Exception as e:
                    self.error_log('Waiting instance ready exception: %s' % str(e))  # noqa
                sleep(1)
            if not check_successful:
                msg = "System and instance check were not successful"
                self.warning_log(msg)
                raise Exception(msg)
        else:
            self.warning_log("Skipping wait until system and instance check performed")  # noqa
        self.info_log('Starting the selenium node server')
        # Record the instance addresses for later use by the runner.
        self.private_ip = instance.private_ip_address
        self.public_dns = instance.public_dns_name
        self.private_dns = instance.private_dns_name
        self.public_ip = instance.ip_address
        # LINUX
        if self.browser_config.get('platform').lower() == "linux":
            command = self.browser_config.get(
                "selenium_command"
            ).format(**self.browser_config.config)
            self.execute_command(command)
        # WINDOWS
        # BUG FIX: was `.upper() == "windows"`, which can never be true
        # (an upper-cased string compared to a lowercase literal), so
        # the windows branch was unreachable.
        elif self.browser_config.get('platform').lower() == "windows":
            # TODO this code is out of date
            config = self.browser_config.config.copy()
            config['instance_ip_address'] = instance.ip_address
            # BUG FIX: was `self.browser_config("selenium_command")`,
            # which called the config object instead of its .get()
            # accessor.
            command = self.browser_config.get(
                "selenium_command"
            ).format(**config)
            process = Popen(
                command.split(" "),
                stdout=devnull,
                stderr=devnull
            )
            self.runner.xvfb_pids.append(process.pid)
        else:
            msg = "The provided platform name is not supported: select either (WINDOWS) or (LINUX)"  # noqa
            self.critical_log(msg)
            raise Exception(msg)
        return True
    except Exception as e:
        self.error_log('Startup exception: %s' % str(e))
        raise
|
def function[startup, parameter[self]]:
constant[Startup the ec2 instance
]
import module[boto.ec2]
if <ast.UnaryOp object at 0x7da2045645b0> begin[:]
call[name[self].warning_log, parameter[constant[Skipping launch]]]
return[constant[True]]
call[name[self].info_log, parameter[constant[Starting up]]]
variable[instance] assign[=] constant[None]
<ast.Try object at 0x7da204564340>
|
keyword[def] identifier[startup] ( identifier[self] ):
literal[string]
keyword[import] identifier[boto] . identifier[ec2]
keyword[if] keyword[not] identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] ):
identifier[self] . identifier[warning_log] ( literal[string] )
keyword[return] keyword[True]
identifier[self] . identifier[info_log] ( literal[string] )
identifier[instance] = keyword[None]
keyword[try] :
identifier[key_name] = identifier[self] . identifier[browser_config] . identifier[get] (
literal[string]
). identifier[split] ( identifier[os] . identifier[sep] )[- literal[int] ][:- literal[int] ]
keyword[if] identifier[type] ( identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] ))== identifier[str] :
identifier[security_group_ids] =[
identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] )
]
keyword[elif] identifier[type] ( identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] ))== identifier[list] :
identifier[security_group_ids] = identifier[self] . identifier[browser_config] . identifier[get] (
literal[string]
)
keyword[else] :
identifier[msg] = literal[string]
identifier[self] . identifier[critial_log] ( identifier[msg] )
keyword[raise] identifier[Exception] ( identifier[msg] )
identifier[ec2] = identifier[boto] . identifier[ec2] . identifier[connect_to_region] ( identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] ))
identifier[reservation] = identifier[ec2] . identifier[run_instances] (
identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] ),
identifier[key_name] = identifier[key_name] ,
identifier[instance_type] = identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] ),
identifier[security_group_ids] = identifier[security_group_ids]
)
identifier[wait_after_instance_launched] = identifier[BROME_CONFIG] [ literal[string] ][ literal[string] ]
keyword[if] identifier[wait_after_instance_launched] :
identifier[self] . identifier[info_log] (
literal[string] %
identifier[wait_after_instance_launched]
)
identifier[sleep] ( identifier[wait_after_instance_launched] )
keyword[else] :
identifier[self] . identifier[warning_log] ( literal[string] )
keyword[try] :
identifier[instance] = identifier[reservation] . identifier[instances] [ literal[int] ]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[critical_log] (
literal[string] % identifier[str] ( identifier[e] )
)
keyword[raise]
identifier[self] . identifier[instance_id] = identifier[instance] . identifier[id]
identifier[self] . identifier[info_log] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] * literal[int] ):
keyword[try] :
identifier[status] = identifier[instance] . identifier[update] ()
keyword[if] identifier[status] == literal[string] :
keyword[break]
keyword[else] :
identifier[sleep] ( literal[int] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[error_log] (
literal[string] % identifier[str] ( identifier[e] )
)
identifier[sleep] ( literal[int] )
identifier[status] = identifier[instance] . identifier[update] ()
keyword[if] identifier[status] == literal[string] :
identifier[instance] . identifier[add_tag] (
literal[string] ,
literal[string] %
(
identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] ),
identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] ),
identifier[self] . identifier[index]
)
)
identifier[self] . identifier[info_log] (
literal[string] %(
identifier[instance] . identifier[id] ,
identifier[instance] . identifier[ip_address] ,
identifier[instance] . identifier[private_ip_address]
)
)
keyword[else] :
identifier[msg] = literal[string] % identifier[status]
identifier[self] . identifier[error_log] ( identifier[msg] )
keyword[raise] identifier[Exception] ( identifier[msg] )
keyword[if] identifier[BROME_CONFIG] [ literal[string] ][ literal[string] ]:
identifier[check_successful] = keyword[False]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] * literal[int] ):
keyword[try] :
keyword[if] keyword[not] identifier[i] % literal[int] :
keyword[if] keyword[not] identifier[type] ( identifier[status] )== identifier[str] :
identifier[self] . identifier[info_log] (
literal[string] %
(
identifier[status] . identifier[system_status] ,
identifier[status] . identifier[instance_status]
)
)
identifier[status] = identifier[ec2] . identifier[get_all_instance_status] (
identifier[instance_ids] =[ identifier[instance] . identifier[id] ]
)[ literal[int] ]
keyword[if] identifier[status] . identifier[system_status] . identifier[status] == literal[string] keyword[and] identifier[status] . identifier[instance_status] . identifier[status] == literal[string] :
identifier[self] . identifier[info_log] ( literal[string] %( identifier[status] . identifier[system_status] , identifier[status] . identifier[instance_status] ))
identifier[check_successful] = keyword[True]
keyword[break]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[error_log] ( literal[string] % identifier[str] ( identifier[e] ))
identifier[sleep] ( literal[int] )
keyword[if] keyword[not] identifier[check_successful] :
identifier[msg] = literal[string]
identifier[self] . identifier[warning_log] ( identifier[msg] )
keyword[raise] identifier[Exception] ( identifier[msg] )
keyword[else] :
identifier[self] . identifier[warning_log] ( literal[string] )
identifier[self] . identifier[info_log] ( literal[string] )
identifier[self] . identifier[private_ip] = identifier[instance] . identifier[private_ip_address]
identifier[self] . identifier[public_dns] = identifier[instance] . identifier[public_dns_name]
identifier[self] . identifier[private_dns] = identifier[instance] . identifier[private_dns_name]
identifier[self] . identifier[public_ip] = identifier[instance] . identifier[ip_address]
keyword[if] identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] ). identifier[lower] ()== literal[string] :
identifier[command] = identifier[self] . identifier[browser_config] . identifier[get] (
literal[string]
). identifier[format] (** identifier[self] . identifier[browser_config] . identifier[config] )
identifier[self] . identifier[execute_command] ( identifier[command] )
keyword[elif] identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] ). identifier[upper] ()== literal[string] :
identifier[config] = identifier[self] . identifier[browser_config] . identifier[config] . identifier[copy] ()
identifier[config] [ literal[string] ]= identifier[instance] . identifier[ip_address]
identifier[command] = identifier[self] . identifier[browser_config] (
literal[string]
). identifier[format] (** identifier[config] )
identifier[process] = identifier[Popen] (
identifier[command] . identifier[split] ( literal[string] ),
identifier[stdout] = identifier[devnull] ,
identifier[stderr] = identifier[devnull]
)
identifier[self] . identifier[runner] . identifier[xvfb_pids] . identifier[append] ( identifier[process] . identifier[pid] )
keyword[else] :
identifier[msg] = literal[string]
identifier[self] . identifier[critical_log] ( identifier[msg] )
keyword[raise] identifier[Exception] ( identifier[msg] )
keyword[return] keyword[True]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[error_log] ( literal[string] % identifier[str] ( identifier[e] ))
keyword[raise]
|
def startup(self):
"""Startup the ec2 instance
"""
import boto.ec2
if not self.browser_config.get('launch'):
self.warning_log('Skipping launch')
return True # depends on [control=['if'], data=[]]
self.info_log('Starting up')
instance = None
try:
# KEY NAME
key_name = self.browser_config.get('ssh_key_path').split(os.sep)[-1][:-4]
# SECURITY GROUP
if type(self.browser_config.get('security_group_ids')) == str:
security_group_ids = [self.browser_config.get('security_group_ids')] # depends on [control=['if'], data=[]]
elif type(self.browser_config.get('security_group_ids')) == list:
security_group_ids = self.browser_config.get('security_group_ids') # depends on [control=['if'], data=[]]
else:
msg = 'The config security_group_ids must be a string or a list of string' # noqa
self.critial_log(msg)
raise Exception(msg)
# LAUNCH INSTANCE
ec2 = boto.ec2.connect_to_region(self.browser_config.get('region'))
reservation = ec2.run_instances(self.browser_config.get('amiid'), key_name=key_name, instance_type=self.browser_config.get('instance_type'), security_group_ids=security_group_ids)
wait_after_instance_launched = BROME_CONFIG['ec2']['wait_after_instance_launched'] # noqa
if wait_after_instance_launched:
self.info_log('Waiting after instance launched: %s seconds...' % wait_after_instance_launched)
sleep(wait_after_instance_launched) # depends on [control=['if'], data=[]]
else:
self.warning_log('Skipping waiting after instance launched')
try:
instance = reservation.instances[0] # depends on [control=['try'], data=[]]
except Exception as e:
self.critical_log('Instance reservation exception: %s' % str(e))
raise # depends on [control=['except'], data=['e']]
self.instance_id = instance.id
self.info_log('Waiting for the instance to start...')
for i in range(60 * 5):
try:
status = instance.update()
if status == 'running':
break # depends on [control=['if'], data=[]]
else:
sleep(1) # depends on [control=['try'], data=[]]
except Exception as e:
self.error_log('Exception while wait pending: %s' % str(e))
sleep(1) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=[]]
# Wait until instance is running
status = instance.update()
if status == 'running':
instance.add_tag('Name', '%s-selenium-node-%s-%s' % (self.browser_config.get('platform'), self.browser_config.get('browserName'), self.index))
self.info_log('New instance (%s) public ip (%s) private ip (%s)' % (instance.id, instance.ip_address, instance.private_ip_address)) # depends on [control=['if'], data=[]]
else:
msg = 'Instance status is %s and should be (running)' % status
self.error_log(msg)
raise Exception(msg)
if BROME_CONFIG['ec2']['wait_until_system_and_instance_check_performed']: # noqa
check_successful = False
for i in range(5 * 60):
try:
if not i % 60:
if not type(status) == str:
self.info_log('System_status: %s, instance_status: %s' % (status.system_status, status.instance_status)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
status = ec2.get_all_instance_status(instance_ids=[instance.id])[0]
if status.system_status.status == u'ok' and status.instance_status.status == u'ok': # noqa
self.info_log('system_status: %s, instance_status: %s' % (status.system_status, status.instance_status)) # noqa
check_successful = True
break # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as e:
self.error_log('Waiting instance ready exception: %s' % str(e)) # noqa # depends on [control=['except'], data=['e']]
sleep(1) # depends on [control=['for'], data=['i']]
if not check_successful:
msg = 'System and instance check were not successful'
self.warning_log(msg)
raise Exception(msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
self.warning_log('Skipping wait until system and instance check performed') # noqa
self.info_log('Starting the selenium node server')
self.private_ip = instance.private_ip_address
self.public_dns = instance.public_dns_name
self.private_dns = instance.private_dns_name
self.public_ip = instance.ip_address
# LINUX
if self.browser_config.get('platform').lower() == 'linux':
command = self.browser_config.get('selenium_command').format(**self.browser_config.config)
self.execute_command(command) # depends on [control=['if'], data=[]]
elif self.browser_config.get('platform').upper() == 'windows':
# TODO this code is out of date
config = self.browser_config.config.copy()
config['instance_ip_address'] = instance.ip_address
command = self.browser_config('selenium_command').format(**config)
process = Popen(command.split(' '), stdout=devnull, stderr=devnull)
self.runner.xvfb_pids.append(process.pid) # depends on [control=['if'], data=[]]
else:
msg = 'The provided platform name is not supported: select either (WINDOWS) or (LINUX)' # noqa
self.critical_log(msg)
raise Exception(msg)
return True # depends on [control=['try'], data=[]]
except Exception as e:
self.error_log('Startup exception: %s' % str(e))
raise # depends on [control=['except'], data=['e']]
|
def dfs_grid(grid, i, j, mark='X', free='.'):
"""DFS on a grid, mark connected component, iterative version
:param grid: matrix, 4-neighborhood
:param i,j: cell in this matrix, start of DFS exploration
:param free: symbol for walkable cells
:param mark: symbol to overwrite visited vertices
:complexity: linear
"""
height = len(grid)
width = len(grid[0])
to_visit = [(i, j)]
grid[i][j] = mark
while to_visit:
i1, j1 = to_visit.pop()
for i2, j2 in [(i1 + 1, j1), (i1, j1 + 1),
(i1 - 1, j1), (i1, j1 - 1)]:
if (0 <= i2 < height and 0 <= j2 < width and
grid[i2][j2] == free):
grid[i2][j2] = mark # mark path
to_visit.append((i2, j2))
|
def function[dfs_grid, parameter[grid, i, j, mark, free]]:
constant[DFS on a grid, mark connected component, iterative version
:param grid: matrix, 4-neighborhood
:param i,j: cell in this matrix, start of DFS exploration
:param free: symbol for walkable cells
:param mark: symbol to overwrite visited vertices
:complexity: linear
]
variable[height] assign[=] call[name[len], parameter[name[grid]]]
variable[width] assign[=] call[name[len], parameter[call[name[grid]][constant[0]]]]
variable[to_visit] assign[=] list[[<ast.Tuple object at 0x7da1b07f6290>]]
call[call[name[grid]][name[i]]][name[j]] assign[=] name[mark]
while name[to_visit] begin[:]
<ast.Tuple object at 0x7da1b07f5a20> assign[=] call[name[to_visit].pop, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b07f51e0>, <ast.Name object at 0x7da1b07f4c70>]]] in starred[list[[<ast.Tuple object at 0x7da1b07f67a0>, <ast.Tuple object at 0x7da1b07f53f0>, <ast.Tuple object at 0x7da1b07f4760>, <ast.Tuple object at 0x7da1b07f5c60>]]] begin[:]
if <ast.BoolOp object at 0x7da1b07f61d0> begin[:]
call[call[name[grid]][name[i2]]][name[j2]] assign[=] name[mark]
call[name[to_visit].append, parameter[tuple[[<ast.Name object at 0x7da20cabdbd0>, <ast.Name object at 0x7da20cabd180>]]]]
|
keyword[def] identifier[dfs_grid] ( identifier[grid] , identifier[i] , identifier[j] , identifier[mark] = literal[string] , identifier[free] = literal[string] ):
literal[string]
identifier[height] = identifier[len] ( identifier[grid] )
identifier[width] = identifier[len] ( identifier[grid] [ literal[int] ])
identifier[to_visit] =[( identifier[i] , identifier[j] )]
identifier[grid] [ identifier[i] ][ identifier[j] ]= identifier[mark]
keyword[while] identifier[to_visit] :
identifier[i1] , identifier[j1] = identifier[to_visit] . identifier[pop] ()
keyword[for] identifier[i2] , identifier[j2] keyword[in] [( identifier[i1] + literal[int] , identifier[j1] ),( identifier[i1] , identifier[j1] + literal[int] ),
( identifier[i1] - literal[int] , identifier[j1] ),( identifier[i1] , identifier[j1] - literal[int] )]:
keyword[if] ( literal[int] <= identifier[i2] < identifier[height] keyword[and] literal[int] <= identifier[j2] < identifier[width] keyword[and]
identifier[grid] [ identifier[i2] ][ identifier[j2] ]== identifier[free] ):
identifier[grid] [ identifier[i2] ][ identifier[j2] ]= identifier[mark]
identifier[to_visit] . identifier[append] (( identifier[i2] , identifier[j2] ))
|
def dfs_grid(grid, i, j, mark='X', free='.'):
"""DFS on a grid, mark connected component, iterative version
:param grid: matrix, 4-neighborhood
:param i,j: cell in this matrix, start of DFS exploration
:param free: symbol for walkable cells
:param mark: symbol to overwrite visited vertices
:complexity: linear
"""
height = len(grid)
width = len(grid[0])
to_visit = [(i, j)]
grid[i][j] = mark
while to_visit:
(i1, j1) = to_visit.pop()
for (i2, j2) in [(i1 + 1, j1), (i1, j1 + 1), (i1 - 1, j1), (i1, j1 - 1)]:
if 0 <= i2 < height and 0 <= j2 < width and (grid[i2][j2] == free):
grid[i2][j2] = mark # mark path
to_visit.append((i2, j2)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['while'], data=[]]
|
def diffPrefsPrior(priorstring):
    """Parses `priorstring` and returns `prior` tuple.

    `priorstring` is a comma-separated spec; the only recognized form is
    ``'invquadratic,C1,C2'`` with C1 > 0 and C2 > 0, which is returned as
    the tuple ``('invquadratic', c1, c2)`` with the constants as floats.

    Raises:
        ValueError: if `priorstring` is not a recognized prior spec.
        AssertionError: if C1 or C2 is not strictly positive.
    """
    assert isinstance(priorstring, str)
    prior = priorstring.split(',')
    if len(prior) == 3 and prior[0] == 'invquadratic':
        [c1, c2] = [float(x) for x in prior[1 : ]]
        # BUGFIX: the message used to say "must be > 1" although the
        # condition enforces > 0; the message now matches the check.
        assert c1 > 0 and c2 > 0, "C1 and C2 must be > 0 for invquadratic prior"
        return ('invquadratic', c1, c2)
    else:
        raise ValueError("Invalid diffprefsprior: {0}".format(priorstring))
|
def function[diffPrefsPrior, parameter[priorstring]]:
constant[Parses `priorstring` and returns `prior` tuple.]
assert[call[name[isinstance], parameter[name[priorstring], name[str]]]]
variable[prior] assign[=] call[name[priorstring].split, parameter[constant[,]]]
if <ast.BoolOp object at 0x7da1b0a6cfa0> begin[:]
<ast.List object at 0x7da1b0a6cc10> assign[=] <ast.ListComp object at 0x7da1b0a6dd80>
assert[<ast.BoolOp object at 0x7da18f58d330>]
return[tuple[[<ast.Constant object at 0x7da2054a6e30>, <ast.Name object at 0x7da2054a6ce0>, <ast.Name object at 0x7da2054a7a90>]]]
|
keyword[def] identifier[diffPrefsPrior] ( identifier[priorstring] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[priorstring] , identifier[str] )
identifier[prior] = identifier[priorstring] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[prior] )== literal[int] keyword[and] identifier[prior] [ literal[int] ]== literal[string] :
[ identifier[c1] , identifier[c2] ]=[ identifier[float] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[prior] [ literal[int] :]]
keyword[assert] identifier[c1] > literal[int] keyword[and] identifier[c2] > literal[int] , literal[string]
keyword[return] ( literal[string] , identifier[c1] , identifier[c2] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[priorstring] ))
|
def diffPrefsPrior(priorstring):
"""Parses `priorstring` and returns `prior` tuple."""
assert isinstance(priorstring, str)
prior = priorstring.split(',')
if len(prior) == 3 and prior[0] == 'invquadratic':
[c1, c2] = [float(x) for x in prior[1:]]
assert c1 > 0 and c2 > 0, 'C1 and C2 must be > 1 for invquadratic prior'
return ('invquadratic', c1, c2) # depends on [control=['if'], data=[]]
else:
raise ValueError('Invalid diffprefsprior: {0}'.format(priorstring))
|
def ReadClientStartupInfoHistory(self, client_id, timerange=None):
    """Reads the full startup history for a particular client."""
    from_time, to_time = self._ParseTimeRange(timerange)
    history = self.startup_history.get(client_id)
    if not history:
        return []
    results = []
    # Newest entries first; keep only timestamps inside the range.
    for timestamp in sorted(history, reverse=True):
        if not from_time <= timestamp <= to_time:
            continue
        entry = rdf_client.StartupInfo.FromSerializedString(history[timestamp])
        entry.timestamp = timestamp
        results.append(entry)
    return results
|
def function[ReadClientStartupInfoHistory, parameter[self, client_id, timerange]]:
constant[Reads the full startup history for a particular client.]
<ast.Tuple object at 0x7da2054a4d90> assign[=] call[name[self]._ParseTimeRange, parameter[name[timerange]]]
variable[history] assign[=] call[name[self].startup_history.get, parameter[name[client_id]]]
if <ast.UnaryOp object at 0x7da1b1d92350> begin[:]
return[list[[]]]
variable[res] assign[=] list[[]]
for taget[name[ts]] in starred[call[name[sorted], parameter[name[history]]]] begin[:]
if <ast.BoolOp object at 0x7da1b1d92530> begin[:]
continue
variable[client_data] assign[=] call[name[rdf_client].StartupInfo.FromSerializedString, parameter[call[name[history]][name[ts]]]]
name[client_data].timestamp assign[=] name[ts]
call[name[res].append, parameter[name[client_data]]]
return[name[res]]
|
keyword[def] identifier[ReadClientStartupInfoHistory] ( identifier[self] , identifier[client_id] , identifier[timerange] = keyword[None] ):
literal[string]
identifier[from_time] , identifier[to_time] = identifier[self] . identifier[_ParseTimeRange] ( identifier[timerange] )
identifier[history] = identifier[self] . identifier[startup_history] . identifier[get] ( identifier[client_id] )
keyword[if] keyword[not] identifier[history] :
keyword[return] []
identifier[res] =[]
keyword[for] identifier[ts] keyword[in] identifier[sorted] ( identifier[history] , identifier[reverse] = keyword[True] ):
keyword[if] identifier[ts] < identifier[from_time] keyword[or] identifier[ts] > identifier[to_time] :
keyword[continue]
identifier[client_data] = identifier[rdf_client] . identifier[StartupInfo] . identifier[FromSerializedString] ( identifier[history] [ identifier[ts] ])
identifier[client_data] . identifier[timestamp] = identifier[ts]
identifier[res] . identifier[append] ( identifier[client_data] )
keyword[return] identifier[res]
|
def ReadClientStartupInfoHistory(self, client_id, timerange=None):
"""Reads the full startup history for a particular client."""
(from_time, to_time) = self._ParseTimeRange(timerange)
history = self.startup_history.get(client_id)
if not history:
return [] # depends on [control=['if'], data=[]]
res = []
for ts in sorted(history, reverse=True):
if ts < from_time or ts > to_time:
continue # depends on [control=['if'], data=[]]
client_data = rdf_client.StartupInfo.FromSerializedString(history[ts])
client_data.timestamp = ts
res.append(client_data) # depends on [control=['for'], data=['ts']]
return res
|
def create_incident(**kwargs):
    """
    Creates an incident
    """
    incidents = cachet.Incidents(endpoint=ENDPOINT, api_token=API_TOKEN)
    # Mandatory fields first; component info only when a component is given.
    payload = {
        'name': kwargs['name'],
        'message': kwargs['message'],
        'status': kwargs['status'],
    }
    if 'component_id' in kwargs:
        payload['component_id'] = kwargs['component_id']
        payload['component_status'] = kwargs['component_status']
    return incidents.post(**payload)
|
def function[create_incident, parameter[]]:
constant[
Creates an incident
]
variable[incidents] assign[=] call[name[cachet].Incidents, parameter[]]
if compare[constant[component_id] in name[kwargs]] begin[:]
return[call[name[incidents].post, parameter[]]]
|
keyword[def] identifier[create_incident] (** identifier[kwargs] ):
literal[string]
identifier[incidents] = identifier[cachet] . identifier[Incidents] ( identifier[endpoint] = identifier[ENDPOINT] , identifier[api_token] = identifier[API_TOKEN] )
keyword[if] literal[string] keyword[in] identifier[kwargs] :
keyword[return] identifier[incidents] . identifier[post] ( identifier[name] = identifier[kwargs] [ literal[string] ],
identifier[message] = identifier[kwargs] [ literal[string] ],
identifier[status] = identifier[kwargs] [ literal[string] ],
identifier[component_id] = identifier[kwargs] [ literal[string] ],
identifier[component_status] = identifier[kwargs] [ literal[string] ])
keyword[else] :
keyword[return] identifier[incidents] . identifier[post] ( identifier[name] = identifier[kwargs] [ literal[string] ],
identifier[message] = identifier[kwargs] [ literal[string] ],
identifier[status] = identifier[kwargs] [ literal[string] ])
|
def create_incident(**kwargs):
"""
Creates an incident
"""
incidents = cachet.Incidents(endpoint=ENDPOINT, api_token=API_TOKEN)
if 'component_id' in kwargs:
return incidents.post(name=kwargs['name'], message=kwargs['message'], status=kwargs['status'], component_id=kwargs['component_id'], component_status=kwargs['component_status']) # depends on [control=['if'], data=['kwargs']]
else:
return incidents.post(name=kwargs['name'], message=kwargs['message'], status=kwargs['status'])
|
def restore_offsets(cls, client, parsed_consumer_offsets):
    """Fetch current offsets from kafka, validate them against given
    consumer-offsets data and commit the new offsets.
    :param client: Kafka-client
    :param parsed_consumer_offsets: Parsed consumer offset data from json file
    :type parsed_consumer_offsets: dict(group: dict(topic: partition-offsets))
    """
    # Fetch current offsets
    try:
        consumer_group = parsed_consumer_offsets['groupid']
        topics_offset_data = parsed_consumer_offsets['offsets']
        # Map each topic to the list of partitions present in the file.
        topic_partitions = dict(
            (topic, list(offset_data.keys()))
            for topic, offset_data in six.iteritems(topics_offset_data)
        )
    except (KeyError, IndexError):
        # BUGFIX: missing 'groupid'/'offsets' keys raise KeyError, which the
        # previous 'except IndexError' never caught, so the error message
        # below was dead code.
        print(
            "Error: Given parsed consumer-offset data {consumer_offsets} "
            "could not be parsed".format(consumer_offsets=parsed_consumer_offsets),
            file=sys.stderr,
        )
        raise
    current_offsets = get_consumer_offsets_metadata(
        client,
        consumer_group,
        topic_partitions,
    )
    # Build new offsets, validated against the current Kafka state.
    new_offsets = cls.build_new_offsets(
        client,
        topics_offset_data,
        topic_partitions,
        current_offsets,
    )
    # Commit offsets (redundant re-read of 'groupid' removed).
    set_consumer_offsets(client, consumer_group, new_offsets)
    print("Restored to new offsets {offsets}".format(offsets=dict(new_offsets)))
|
def function[restore_offsets, parameter[cls, client, parsed_consumer_offsets]]:
constant[Fetch current offsets from kafka, validate them against given
consumer-offsets data and commit the new offsets.
:param client: Kafka-client
:param parsed_consumer_offsets: Parsed consumer offset data from json file
:type parsed_consumer_offsets: dict(group: dict(topic: partition-offsets))
]
<ast.Try object at 0x7da1b0840040>
variable[current_offsets] assign[=] call[name[get_consumer_offsets_metadata], parameter[name[client], name[consumer_group], name[topic_partitions]]]
variable[new_offsets] assign[=] call[name[cls].build_new_offsets, parameter[name[client], name[topics_offset_data], name[topic_partitions], name[current_offsets]]]
variable[consumer_group] assign[=] call[name[parsed_consumer_offsets]][constant[groupid]]
call[name[set_consumer_offsets], parameter[name[client], name[consumer_group], name[new_offsets]]]
call[name[print], parameter[call[constant[Restored to new offsets {offsets}].format, parameter[]]]]
|
keyword[def] identifier[restore_offsets] ( identifier[cls] , identifier[client] , identifier[parsed_consumer_offsets] ):
literal[string]
keyword[try] :
identifier[consumer_group] = identifier[parsed_consumer_offsets] [ literal[string] ]
identifier[topics_offset_data] = identifier[parsed_consumer_offsets] [ literal[string] ]
identifier[topic_partitions] = identifier[dict] (
( identifier[topic] ,[ identifier[partition] keyword[for] identifier[partition] keyword[in] identifier[offset_data] . identifier[keys] ()])
keyword[for] identifier[topic] , identifier[offset_data] keyword[in] identifier[six] . identifier[iteritems] ( identifier[topics_offset_data] )
)
keyword[except] identifier[IndexError] :
identifier[print] (
literal[string]
literal[string] . identifier[format] ( identifier[consumer_offsets] = identifier[parsed_consumer_offsets] ),
identifier[file] = identifier[sys] . identifier[stderr] ,
)
keyword[raise]
identifier[current_offsets] = identifier[get_consumer_offsets_metadata] (
identifier[client] ,
identifier[consumer_group] ,
identifier[topic_partitions] ,
)
identifier[new_offsets] = identifier[cls] . identifier[build_new_offsets] (
identifier[client] ,
identifier[topics_offset_data] ,
identifier[topic_partitions] ,
identifier[current_offsets] ,
)
identifier[consumer_group] = identifier[parsed_consumer_offsets] [ literal[string] ]
identifier[set_consumer_offsets] ( identifier[client] , identifier[consumer_group] , identifier[new_offsets] )
identifier[print] ( literal[string] . identifier[format] ( identifier[offsets] = identifier[dict] ( identifier[new_offsets] )))
|
def restore_offsets(cls, client, parsed_consumer_offsets):
"""Fetch current offsets from kafka, validate them against given
consumer-offsets data and commit the new offsets.
:param client: Kafka-client
:param parsed_consumer_offsets: Parsed consumer offset data from json file
:type parsed_consumer_offsets: dict(group: dict(topic: partition-offsets))
"""
# Fetch current offsets
try:
consumer_group = parsed_consumer_offsets['groupid']
topics_offset_data = parsed_consumer_offsets['offsets']
topic_partitions = dict(((topic, [partition for partition in offset_data.keys()]) for (topic, offset_data) in six.iteritems(topics_offset_data))) # depends on [control=['try'], data=[]]
except IndexError:
print('Error: Given parsed consumer-offset data {consumer_offsets} could not be parsed'.format(consumer_offsets=parsed_consumer_offsets), file=sys.stderr)
raise # depends on [control=['except'], data=[]]
current_offsets = get_consumer_offsets_metadata(client, consumer_group, topic_partitions)
# Build new offsets
new_offsets = cls.build_new_offsets(client, topics_offset_data, topic_partitions, current_offsets)
# Commit offsets
consumer_group = parsed_consumer_offsets['groupid']
set_consumer_offsets(client, consumer_group, new_offsets)
print('Restored to new offsets {offsets}'.format(offsets=dict(new_offsets)))
|
def remove_rows_matching(df, column, match):
    """
    Return a ``DataFrame`` with rows where `column` values match `match` are removed.
    The selected `column` series of values from the supplied Pandas ``DataFrame`` is compared
    to `match`, and those rows that match are removed from the DataFrame.
    :param df: Pandas ``DataFrame``
    :param column: Column indexer
    :param match: ``str`` match target
    :return: Pandas ``DataFrame`` filtered
    """
    # Work on a copy so the caller's frame is left untouched.
    frame = df.copy()
    keep_mask = frame[column].values != match
    return frame.iloc[keep_mask, :]
|
def function[remove_rows_matching, parameter[df, column, match]]:
constant[
Return a ``DataFrame`` with rows where `column` values match `match` are removed.
The selected `column` series of values from the supplied Pandas ``DataFrame`` is compared
to `match`, and those rows that match are removed from the DataFrame.
:param df: Pandas ``DataFrame``
:param column: Column indexer
:param match: ``str`` match target
:return: Pandas ``DataFrame`` filtered
]
variable[df] assign[=] call[name[df].copy, parameter[]]
variable[mask] assign[=] compare[call[name[df]][name[column]].values not_equal[!=] name[match]]
return[call[name[df].iloc][tuple[[<ast.Name object at 0x7da18c4cfac0>, <ast.Slice object at 0x7da18c4cc820>]]]]
|
keyword[def] identifier[remove_rows_matching] ( identifier[df] , identifier[column] , identifier[match] ):
literal[string]
identifier[df] = identifier[df] . identifier[copy] ()
identifier[mask] = identifier[df] [ identifier[column] ]. identifier[values] != identifier[match]
keyword[return] identifier[df] . identifier[iloc] [ identifier[mask] ,:]
|
def remove_rows_matching(df, column, match):
"""
Return a ``DataFrame`` with rows where `column` values match `match` are removed.
The selected `column` series of values from the supplied Pandas ``DataFrame`` is compared
to `match`, and those rows that match are removed from the DataFrame.
:param df: Pandas ``DataFrame``
:param column: Column indexer
:param match: ``str`` match target
:return: Pandas ``DataFrame`` filtered
"""
df = df.copy()
mask = df[column].values != match
return df.iloc[mask, :]
|
def parse_url(arg, extract, key=None):
    """
    Extract the part of a URL named by `extract`.
    When `extract` is 'QUERY', an optional `key` selects the value of a
    single query-string parameter.
    Parameters
    ----------
    extract : one of {'PROTOCOL', 'HOST', 'PATH', 'REF',
                      'AUTHORITY', 'FILE', 'USERINFO', 'QUERY'}
    key : string (optional)
    Examples
    --------
    >>> url = "https://www.youtube.com/watch?v=kEuEcWfewf8&t=10"
    >>> parse_url(url, 'QUERY', 'v')  # doctest: +SKIP
    'kEuEcWfewf8'
    Returns
    -------
    extracted : string
    """
    op = ops.ParseURL(arg, extract, key)
    return op.to_expr()
|
def function[parse_url, parameter[arg, extract, key]]:
constant[
Returns the portion of a URL corresponding to a part specified
by 'extract'
Can optionally specify a key to retrieve an associated value
if extract parameter is 'QUERY'
Parameters
----------
extract : one of {'PROTOCOL', 'HOST', 'PATH', 'REF',
'AUTHORITY', 'FILE', 'USERINFO', 'QUERY'}
key : string (optional)
Examples
--------
>>> url = "https://www.youtube.com/watch?v=kEuEcWfewf8&t=10"
>>> parse_url(url, 'QUERY', 'v') # doctest: +SKIP
'kEuEcWfewf8'
Returns
-------
extracted : string
]
return[call[call[name[ops].ParseURL, parameter[name[arg], name[extract], name[key]]].to_expr, parameter[]]]
|
keyword[def] identifier[parse_url] ( identifier[arg] , identifier[extract] , identifier[key] = keyword[None] ):
literal[string]
keyword[return] identifier[ops] . identifier[ParseURL] ( identifier[arg] , identifier[extract] , identifier[key] ). identifier[to_expr] ()
|
def parse_url(arg, extract, key=None):
"""
Returns the portion of a URL corresponding to a part specified
by 'extract'
Can optionally specify a key to retrieve an associated value
if extract parameter is 'QUERY'
Parameters
----------
extract : one of {'PROTOCOL', 'HOST', 'PATH', 'REF',
'AUTHORITY', 'FILE', 'USERINFO', 'QUERY'}
key : string (optional)
Examples
--------
>>> url = "https://www.youtube.com/watch?v=kEuEcWfewf8&t=10"
>>> parse_url(url, 'QUERY', 'v') # doctest: +SKIP
'kEuEcWfewf8'
Returns
-------
extracted : string
"""
return ops.ParseURL(arg, extract, key).to_expr()
|
def get_ticket_for_sns_token(self):
    """Build the POST body used when requesting an sns_token.

    Logs the pending request (method and URL), then returns the
    openid / persistent_code pair the sns_token endpoint expects.
    """
    self.logger.info("%s\t%s" % (self.request_method, self.request_url))
    payload = {}
    payload["openid"] = self.get_openid()
    payload["persistent_code"] = self.get_persistent_code()
    return payload
|
def function[get_ticket_for_sns_token, parameter[self]]:
constant[This is a shortcut for getting the sns_token, as a post data of
request body.]
call[name[self].logger.info, parameter[binary_operation[constant[%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b237d240>, <ast.Attribute object at 0x7da1b237feb0>]]]]]
return[dictionary[[<ast.Constant object at 0x7da1b237f0d0>, <ast.Constant object at 0x7da1b237f850>], [<ast.Call object at 0x7da1b237fca0>, <ast.Call object at 0x7da1b237fbb0>]]]
|
keyword[def] identifier[get_ticket_for_sns_token] ( identifier[self] ):
literal[string]
identifier[self] . identifier[logger] . identifier[info] ( literal[string] %( identifier[self] . identifier[request_method] , identifier[self] . identifier[request_url] ))
keyword[return] {
literal[string] : identifier[self] . identifier[get_openid] (),
literal[string] : identifier[self] . identifier[get_persistent_code] (),
}
|
def get_ticket_for_sns_token(self):
"""This is a shortcut for getting the sns_token, as a post data of
request body."""
self.logger.info('%s\t%s' % (self.request_method, self.request_url))
return {'openid': self.get_openid(), 'persistent_code': self.get_persistent_code()}
|
def _add_routes(self, settings):
    """
    Load BGP routes described by ``settings``.

    Each entry is dispatched to the speaker method matching its route
    kind (plain prefix, EVPN, or FlowSpec).  Entries of an unknown kind
    are skipped, and entries the speaker rejects are logged without
    aborting the remaining routes.
    """
    for cfg in settings:
        # Pick the speaker call from the discriminating key.
        if 'prefix' in cfg:
            add_route = self.speaker.prefix_add
        elif 'route_type' in cfg:
            add_route = self.speaker.evpn_prefix_add
        elif 'flowspec_family' in cfg:
            add_route = self.speaker.flowspec_prefix_add
        else:
            LOG.debug('Skip invalid route settings: %s', cfg)
            continue
        LOG.debug('Adding route settings: %s', cfg)
        try:
            add_route(**cfg)
        except RuntimeConfigError as err:
            LOG.exception(err)
|
def function[_add_routes, parameter[self, settings]]:
constant[
Add BGP routes from given settings.
All valid routes are loaded.
Miss-configured routes are ignored and errors are logged.
]
for taget[name[route_settings]] in starred[name[settings]] begin[:]
if compare[constant[prefix] in name[route_settings]] begin[:]
variable[prefix_add] assign[=] name[self].speaker.prefix_add
call[name[LOG].debug, parameter[constant[Adding route settings: %s], name[route_settings]]]
<ast.Try object at 0x7da1b1b0e1d0>
|
keyword[def] identifier[_add_routes] ( identifier[self] , identifier[settings] ):
literal[string]
keyword[for] identifier[route_settings] keyword[in] identifier[settings] :
keyword[if] literal[string] keyword[in] identifier[route_settings] :
identifier[prefix_add] = identifier[self] . identifier[speaker] . identifier[prefix_add]
keyword[elif] literal[string] keyword[in] identifier[route_settings] :
identifier[prefix_add] = identifier[self] . identifier[speaker] . identifier[evpn_prefix_add]
keyword[elif] literal[string] keyword[in] identifier[route_settings] :
identifier[prefix_add] = identifier[self] . identifier[speaker] . identifier[flowspec_prefix_add]
keyword[else] :
identifier[LOG] . identifier[debug] ( literal[string] , identifier[route_settings] )
keyword[continue]
identifier[LOG] . identifier[debug] ( literal[string] , identifier[route_settings] )
keyword[try] :
identifier[prefix_add] (** identifier[route_settings] )
keyword[except] identifier[RuntimeConfigError] keyword[as] identifier[e] :
identifier[LOG] . identifier[exception] ( identifier[e] )
|
def _add_routes(self, settings):
"""
Add BGP routes from given settings.
All valid routes are loaded.
Miss-configured routes are ignored and errors are logged.
"""
for route_settings in settings:
if 'prefix' in route_settings:
prefix_add = self.speaker.prefix_add # depends on [control=['if'], data=[]]
elif 'route_type' in route_settings:
prefix_add = self.speaker.evpn_prefix_add # depends on [control=['if'], data=[]]
elif 'flowspec_family' in route_settings:
prefix_add = self.speaker.flowspec_prefix_add # depends on [control=['if'], data=[]]
else:
LOG.debug('Skip invalid route settings: %s', route_settings)
continue
LOG.debug('Adding route settings: %s', route_settings)
try:
prefix_add(**route_settings) # depends on [control=['try'], data=[]]
except RuntimeConfigError as e:
LOG.exception(e) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['route_settings']]
|
def element(self, inp=None, order=None, **kwargs):
    """Create an element of this space.

    Without ``inp`` an uninitialized element is created.  Otherwise the
    input is turned into an element either by sampling (for callables)
    or by delegating to ``tspace.element``.

    Parameters
    ----------
    inp : optional
        Data to wrap.  May be an existing element of this space, a
        callable understood by `sampling`, or anything accepted by
        ``tspace.element``.
    order : {None, 'C', 'F'}, optional
        Memory layout of the result.  ``'C'`` and ``'F'`` enforce
        contiguous storage in the respective ordering; ``None``
        enforces no contiguousness.
    kwargs :
        Extra arguments forwarded to `sampling` when ``inp`` is
        callable, in the form ``sampling(inp, **kwargs)``.
        This can be used e.g. for functions with parameters.

    Returns
    -------
    element : `DiscretizedSpaceElement`
        The discretized element, calculated as ``sampling(inp)`` or
        ``tspace.element(inp)``, tried in this order.

    See Also
    --------
    sampling : create a discrete element from a non-discretized one
    """
    if inp is None:
        return self.element_type(self, self.tspace.element(order=order))
    # An element that is already in this space is returned as-is,
    # unless a specific memory layout was requested.
    if inp in self and order is None:
        return inp
    if callable(inp):
        values = self.sampling(inp, **kwargs)
        return self.element_type(
            self, self.tspace.element(values, order=order))
    return self.element_type(
        self, self.tspace.element(inp, order=order))
|
def function[element, parameter[self, inp, order]]:
constant[Create an element from ``inp`` or from scratch.
Parameters
----------
inp : optional
Input data to create an element from. It needs to be
understood by either the `sampling` operator of this
instance or by its ``tspace.element`` method.
order : {None, 'C', 'F'}, optional
Storage order of the returned element. For ``'C'`` and ``'F'``,
contiguous memory in the respective ordering is enforced.
The default ``None`` enforces no contiguousness.
kwargs :
Additional arguments passed on to `sampling` when called
on ``inp``, in the form ``sampling(inp, **kwargs)``.
This can be used e.g. for functions with parameters.
Returns
-------
element : `DiscretizedSpaceElement`
The discretized element, calculated as ``sampling(inp)`` or
``tspace.element(inp)``, tried in this order.
See Also
--------
sampling : create a discrete element from a non-discretized one
]
if compare[name[inp] is constant[None]] begin[:]
return[call[name[self].element_type, parameter[name[self], call[name[self].tspace.element, parameter[]]]]]
|
keyword[def] identifier[element] ( identifier[self] , identifier[inp] = keyword[None] , identifier[order] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[inp] keyword[is] keyword[None] :
keyword[return] identifier[self] . identifier[element_type] ( identifier[self] , identifier[self] . identifier[tspace] . identifier[element] ( identifier[order] = identifier[order] ))
keyword[elif] identifier[inp] keyword[in] identifier[self] keyword[and] identifier[order] keyword[is] keyword[None] :
keyword[return] identifier[inp]
keyword[elif] identifier[callable] ( identifier[inp] ):
identifier[sampled] = identifier[self] . identifier[sampling] ( identifier[inp] ,** identifier[kwargs] )
keyword[return] identifier[self] . identifier[element_type] ( identifier[self] ,
identifier[self] . identifier[tspace] . identifier[element] ( identifier[sampled] , identifier[order] = identifier[order] ))
keyword[else] :
keyword[return] identifier[self] . identifier[element_type] ( identifier[self] ,
identifier[self] . identifier[tspace] . identifier[element] ( identifier[inp] , identifier[order] = identifier[order] ))
|
def element(self, inp=None, order=None, **kwargs):
"""Create an element from ``inp`` or from scratch.
Parameters
----------
inp : optional
Input data to create an element from. It needs to be
understood by either the `sampling` operator of this
instance or by its ``tspace.element`` method.
order : {None, 'C', 'F'}, optional
Storage order of the returned element. For ``'C'`` and ``'F'``,
contiguous memory in the respective ordering is enforced.
The default ``None`` enforces no contiguousness.
kwargs :
Additional arguments passed on to `sampling` when called
on ``inp``, in the form ``sampling(inp, **kwargs)``.
This can be used e.g. for functions with parameters.
Returns
-------
element : `DiscretizedSpaceElement`
The discretized element, calculated as ``sampling(inp)`` or
``tspace.element(inp)``, tried in this order.
See Also
--------
sampling : create a discrete element from a non-discretized one
"""
if inp is None:
return self.element_type(self, self.tspace.element(order=order)) # depends on [control=['if'], data=[]]
elif inp in self and order is None:
return inp # depends on [control=['if'], data=[]]
elif callable(inp):
sampled = self.sampling(inp, **kwargs)
return self.element_type(self, self.tspace.element(sampled, order=order)) # depends on [control=['if'], data=[]]
else:
return self.element_type(self, self.tspace.element(inp, order=order))
|
def get_query_kwargs(es_defs):
    """
    Build the special query kwargs from an ``es_defs`` definition dict.

    Used when querying for the data of an instance of a class.
    reference: rdfframework.sparl.queries.sparqlAllItemDataTemplate.rq
    """
    kwargs = {}
    if not es_defs:
        return kwargs
    # Map each definition key to its query-kwarg name; only the first
    # value of a non-empty definition list is used.
    for def_key, kwarg_key in (("kds_esSpecialUnion", "special_union"),
                               ("kds_esQueryFilter", "filters")):
        values = es_defs.get(def_key)
        if values:
            kwargs[kwarg_key] = values[0]
    return kwargs
|
def function[get_query_kwargs, parameter[es_defs]]:
constant[
Reads the es_defs and returns a dict of special kwargs to use when
query for data of an instance of a class
reference: rdfframework.sparl.queries.sparqlAllItemDataTemplate.rq
]
variable[rtn_dict] assign[=] dictionary[[], []]
if name[es_defs] begin[:]
if call[name[es_defs].get, parameter[constant[kds_esSpecialUnion]]] begin[:]
call[name[rtn_dict]][constant[special_union]] assign[=] call[call[name[es_defs]][constant[kds_esSpecialUnion]]][constant[0]]
if call[name[es_defs].get, parameter[constant[kds_esQueryFilter]]] begin[:]
call[name[rtn_dict]][constant[filters]] assign[=] call[call[name[es_defs]][constant[kds_esQueryFilter]]][constant[0]]
return[name[rtn_dict]]
|
keyword[def] identifier[get_query_kwargs] ( identifier[es_defs] ):
literal[string]
identifier[rtn_dict] ={}
keyword[if] identifier[es_defs] :
keyword[if] identifier[es_defs] . identifier[get] ( literal[string] ):
identifier[rtn_dict] [ literal[string] ]= identifier[es_defs] [ literal[string] ][ literal[int] ]
keyword[if] identifier[es_defs] . identifier[get] ( literal[string] ):
identifier[rtn_dict] [ literal[string] ]= identifier[es_defs] [ literal[string] ][ literal[int] ]
keyword[return] identifier[rtn_dict]
|
def get_query_kwargs(es_defs):
"""
Reads the es_defs and returns a dict of special kwargs to use when
query for data of an instance of a class
reference: rdfframework.sparl.queries.sparqlAllItemDataTemplate.rq
"""
rtn_dict = {}
if es_defs:
if es_defs.get('kds_esSpecialUnion'):
rtn_dict['special_union'] = es_defs['kds_esSpecialUnion'][0] # depends on [control=['if'], data=[]]
if es_defs.get('kds_esQueryFilter'):
rtn_dict['filters'] = es_defs['kds_esQueryFilter'][0] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return rtn_dict
|
def is_quota_exceeded(self) -> bool:
    '''Return whether the quota is exceeded.

    The quota counts as exceeded once ``size`` has reached ``quota``
    and no root URLs remain to be fetched.  Returns False when no
    quota is configured or no URL table is attached (the original
    fell through and implicitly returned None despite the ``bool``
    annotation).
    '''
    if self.quota and self._url_table is not None:
        return (self.size >= self.quota
                and self._url_table.get_root_url_todo_count() == 0)
    return False
|
def function[is_quota_exceeded, parameter[self]]:
constant[Return whether the quota is exceeded.]
if <ast.BoolOp object at 0x7da1b2347b20> begin[:]
return[<ast.BoolOp object at 0x7da1b2344b80>]
|
keyword[def] identifier[is_quota_exceeded] ( identifier[self] )-> identifier[bool] :
literal[string]
keyword[if] identifier[self] . identifier[quota] keyword[and] identifier[self] . identifier[_url_table] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[size] >= identifier[self] . identifier[quota] keyword[and] identifier[self] . identifier[_url_table] . identifier[get_root_url_todo_count] ()== literal[int]
|
def is_quota_exceeded(self) -> bool:
"""Return whether the quota is exceeded."""
if self.quota and self._url_table is not None:
return self.size >= self.quota and self._url_table.get_root_url_todo_count() == 0 # depends on [control=['if'], data=[]]
|
def get_type(type_):
    """
    Gets the declaration for the corresponding custom type.

    Lists are normalized to tuples before lookup so list and tuple
    keys are interchangeable.

    @raise KeyError: Unknown type.
    @see: L{add_type} and L{remove_type}
    """
    if isinstance(type_, list):
        type_ = tuple(type_)
    # Linear scan by equality (not a dict lookup) so an unhashable
    # ``type_`` cannot raise TypeError here.
    # .items(): the original used dict.iteritems(), which is Python 2
    # only and raises AttributeError on Python 3 (this file already
    # uses Python-3-only syntax elsewhere).
    for k, v in TYPE_MAP.items():
        if k == type_:
            return v
    raise KeyError("Unknown type %r" % (type_,))
|
def function[get_type, parameter[type_]]:
constant[
Gets the declaration for the corresponding custom type.
@raise KeyError: Unknown type.
@see: L{add_type} and L{remove_type}
]
if call[name[isinstance], parameter[name[type_], name[list]]] begin[:]
variable[type_] assign[=] call[name[tuple], parameter[name[type_]]]
for taget[tuple[[<ast.Name object at 0x7da18f00f3a0>, <ast.Name object at 0x7da18f00c820>]]] in starred[call[name[TYPE_MAP].iteritems, parameter[]]] begin[:]
if compare[name[k] equal[==] name[type_]] begin[:]
return[name[v]]
<ast.Raise object at 0x7da18f00d510>
|
keyword[def] identifier[get_type] ( identifier[type_] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[type_] , identifier[list] ):
identifier[type_] = identifier[tuple] ( identifier[type_] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[TYPE_MAP] . identifier[iteritems] ():
keyword[if] identifier[k] == identifier[type_] :
keyword[return] identifier[v]
keyword[raise] identifier[KeyError] ( literal[string] %( identifier[type_] ,))
|
def get_type(type_):
"""
Gets the declaration for the corresponding custom type.
@raise KeyError: Unknown type.
@see: L{add_type} and L{remove_type}
"""
if isinstance(type_, list):
type_ = tuple(type_) # depends on [control=['if'], data=[]]
for (k, v) in TYPE_MAP.iteritems():
if k == type_:
return v # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
raise KeyError('Unknown type %r' % (type_,))
|
def network_from_bbox(lat_min=None, lng_min=None, lat_max=None, lng_max=None,
                      bbox=None, network_type='walk', two_way=True,
                      timeout=180, memory=None,
                      max_query_area_size=50*1000*50*1000,
                      custom_osm_filter=None):
    """
    Make a graph network from a bounding lat/lon box composed of nodes and
    edges for use in Pandana street network accessibility calculations.
    You may either enter a lat/long box via the four lat_min,
    lng_min, lat_max, lng_max parameters or the bbox parameter as a tuple.
    Parameters
    ----------
    lat_min : float
        southern latitude of bounding box, if this parameter is used the bbox
        parameter should be None.
    lng_min : float
        eastern latitude of bounding box, if this parameter is used the bbox
        parameter should be None.
    lat_max : float
        northern longitude of bounding box, if this parameter is used the bbox
        parameter should be None.
    lng_max : float
        western longitude of bounding box, if this parameter is used the bbox
        parameter should be None.
    bbox : tuple
        Bounding box formatted as a 4 element tuple:
        (lng_max, lat_min, lng_min, lat_max)
        example: (-122.304611,37.798933,-122.263412,37.822802)
        a bbox can be extracted for an area using: the CSV format bbox from
        http://boundingbox.klokantech.com/. If this parameter is used the
        lat_min, lng_min, lat_max, lng_max parameters in this function
        should be None.
    network_type : {'walk', 'drive'}, optional
        Specify the network type where value of 'walk' includes roadways where
        pedestrians are allowed and pedestrian pathways and 'drive' includes
        driveable roadways. To use a custom definition see the
        custom_osm_filter parameter. Default is walk.
    two_way : bool, optional
        Whether the routes are two-way. If True, node pairs will only
        occur once.
    timeout : int, optional
        the timeout interval for requests and to pass to Overpass API
    memory : int, optional
        server memory allocation size for the query, in bytes. If none,
        server will use its default allocation size
    max_query_area_size : float, optional
        max area for any part of the geometry, in the units the geometry is
        in: any polygon bigger will get divided up for multiple queries to
        Overpass API (default is 50,000 * 50,000 units (ie, 50km x 50km in
        area, if units are meters))
    custom_osm_filter : string, optional
        specify custom arguments for the way["highway"] query to OSM. Must
        follow Overpass API schema. For
        example to request highway ways that are service roads use:
        '["highway"="service"]'
    Returns
    -------
    nodesfinal, edgesfinal : pandas.DataFrame
    """
    start_time = time.time()
    # The two input forms are mutually exclusive: a bbox tuple replaces the
    # four scalar coordinates, and supplying both is rejected outright.
    if bbox is not None:
        assert isinstance(bbox, tuple) \
            and len(bbox) == 4, 'bbox must be a 4 element tuple'
        assert (lat_min is None) and (lng_min is None) and \
            (lat_max is None) and (lng_max is None), \
            'lat_min, lng_min, lat_max and lng_max must be None ' \
            'if you are using bbox'
        # NOTE: the bbox element order here is (lng_max, lat_min, lng_min,
        # lat_max), matching the docstring above — not the conventional
        # (minx, miny, maxx, maxy) order.
        lng_max, lat_min, lng_min, lat_max = bbox
    assert lat_min is not None, 'lat_min cannot be None'
    assert lng_min is not None, 'lng_min cannot be None'
    assert lat_max is not None, 'lat_max cannot be None'
    assert lng_max is not None, 'lng_max cannot be None'
    assert isinstance(lat_min, float) and isinstance(lng_min, float) and \
        isinstance(lat_max, float) and isinstance(lng_max, float), \
        'lat_min, lng_min, lat_max, and lng_max must be floats'
    # Download raw OSM nodes/ways for the box via the Overpass API helper.
    nodes, ways, waynodes = ways_in_bbox(
        lat_min=lat_min, lng_min=lng_min, lat_max=lat_max, lng_max=lng_max,
        network_type=network_type, timeout=timeout,
        memory=memory, max_query_area_size=max_query_area_size,
        custom_osm_filter=custom_osm_filter)
    log('Returning OSM data with {:,} nodes and {:,} ways...'
        .format(len(nodes), len(ways)))
    edgesfinal = node_pairs(nodes, ways, waynodes, two_way=two_way)
    # make the unique set of nodes that ended up in pairs
    # (nodes not referenced by any edge are dropped from the output)
    node_ids = sorted(set(edgesfinal['from_id'].unique())
                      .union(set(edgesfinal['to_id'].unique())))
    nodesfinal = nodes.loc[node_ids]
    nodesfinal = nodesfinal[['lon', 'lat']]
    # Rename to the column names Pandana expects: nodes carry x/y/id and
    # edges carry from/to.
    nodesfinal.rename(columns={'lon': 'x', 'lat': 'y'}, inplace=True)
    nodesfinal['id'] = nodesfinal.index
    edgesfinal.rename(columns={'from_id': 'from', 'to_id': 'to'}, inplace=True)
    log('Returning processed graph with {:,} nodes and {:,} edges...'
        .format(len(nodesfinal), len(edgesfinal)))
    log('Completed OSM data download and Pandana node and edge table '
        'creation in {:,.2f} seconds'.format(time.time()-start_time))
    return nodesfinal, edgesfinal
|
def function[network_from_bbox, parameter[lat_min, lng_min, lat_max, lng_max, bbox, network_type, two_way, timeout, memory, max_query_area_size, custom_osm_filter]]:
constant[
Make a graph network from a bounding lat/lon box composed of nodes and
edges for use in Pandana street network accessibility calculations.
You may either enter a lat/long box via the four lat_min,
lng_min, lat_max, lng_max parameters or the bbox parameter as a tuple.
Parameters
----------
lat_min : float
southern latitude of bounding box, if this parameter is used the bbox
parameter should be None.
lng_min : float
eastern latitude of bounding box, if this parameter is used the bbox
parameter should be None.
lat_max : float
northern longitude of bounding box, if this parameter is used the bbox
parameter should be None.
lng_max : float
western longitude of bounding box, if this parameter is used the bbox
parameter should be None.
bbox : tuple
Bounding box formatted as a 4 element tuple:
(lng_max, lat_min, lng_min, lat_max)
example: (-122.304611,37.798933,-122.263412,37.822802)
a bbox can be extracted for an area using: the CSV format bbox from
http://boundingbox.klokantech.com/. If this parameter is used the
lat_min, lng_min, lat_max, lng_max parameters in this function
should be None.
network_type : {'walk', 'drive'}, optional
Specify the network type where value of 'walk' includes roadways where
pedestrians are allowed and pedestrian pathways and 'drive' includes
driveable roadways. To use a custom definition see the
custom_osm_filter parameter. Default is walk.
two_way : bool, optional
Whether the routes are two-way. If True, node pairs will only
occur once.
timeout : int, optional
the timeout interval for requests and to pass to Overpass API
memory : int, optional
server memory allocation size for the query, in bytes. If none,
server will use its default allocation size
max_query_area_size : float, optional
max area for any part of the geometry, in the units the geometry is
in: any polygon bigger will get divided up for multiple queries to
Overpass API (default is 50,000 * 50,000 units (ie, 50km x 50km in
area, if units are meters))
custom_osm_filter : string, optional
specify custom arguments for the way["highway"] query to OSM. Must
follow Overpass API schema. For
example to request highway ways that are service roads use:
'["highway"="service"]'
Returns
-------
nodesfinal, edgesfinal : pandas.DataFrame
]
variable[start_time] assign[=] call[name[time].time, parameter[]]
if compare[name[bbox] is_not constant[None]] begin[:]
assert[<ast.BoolOp object at 0x7da1b0f20880>]
assert[<ast.BoolOp object at 0x7da1b0f20ac0>]
<ast.Tuple object at 0x7da1b0f20d90> assign[=] name[bbox]
assert[compare[name[lat_min] is_not constant[None]]]
assert[compare[name[lng_min] is_not constant[None]]]
assert[compare[name[lat_max] is_not constant[None]]]
assert[compare[name[lng_max] is_not constant[None]]]
assert[<ast.BoolOp object at 0x7da1b0f212a0>]
<ast.Tuple object at 0x7da1b0f21630> assign[=] call[name[ways_in_bbox], parameter[]]
call[name[log], parameter[call[constant[Returning OSM data with {:,} nodes and {:,} ways...].format, parameter[call[name[len], parameter[name[nodes]]], call[name[len], parameter[name[ways]]]]]]]
variable[edgesfinal] assign[=] call[name[node_pairs], parameter[name[nodes], name[ways], name[waynodes]]]
variable[node_ids] assign[=] call[name[sorted], parameter[call[call[name[set], parameter[call[call[name[edgesfinal]][constant[from_id]].unique, parameter[]]]].union, parameter[call[name[set], parameter[call[call[name[edgesfinal]][constant[to_id]].unique, parameter[]]]]]]]]
variable[nodesfinal] assign[=] call[name[nodes].loc][name[node_ids]]
variable[nodesfinal] assign[=] call[name[nodesfinal]][list[[<ast.Constant object at 0x7da1b0f22770>, <ast.Constant object at 0x7da1b0f227a0>]]]
call[name[nodesfinal].rename, parameter[]]
call[name[nodesfinal]][constant[id]] assign[=] name[nodesfinal].index
call[name[edgesfinal].rename, parameter[]]
call[name[log], parameter[call[constant[Returning processed graph with {:,} nodes and {:,} edges...].format, parameter[call[name[len], parameter[name[nodesfinal]]], call[name[len], parameter[name[edgesfinal]]]]]]]
call[name[log], parameter[call[constant[Completed OSM data download and Pandana node and edge table creation in {:,.2f} seconds].format, parameter[binary_operation[call[name[time].time, parameter[]] - name[start_time]]]]]]
return[tuple[[<ast.Name object at 0x7da1b0f23c10>, <ast.Name object at 0x7da1b0f23be0>]]]
|
keyword[def] identifier[network_from_bbox] ( identifier[lat_min] = keyword[None] , identifier[lng_min] = keyword[None] , identifier[lat_max] = keyword[None] , identifier[lng_max] = keyword[None] ,
identifier[bbox] = keyword[None] , identifier[network_type] = literal[string] , identifier[two_way] = keyword[True] ,
identifier[timeout] = literal[int] , identifier[memory] = keyword[None] ,
identifier[max_query_area_size] = literal[int] * literal[int] * literal[int] * literal[int] ,
identifier[custom_osm_filter] = keyword[None] ):
literal[string]
identifier[start_time] = identifier[time] . identifier[time] ()
keyword[if] identifier[bbox] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[isinstance] ( identifier[bbox] , identifier[tuple] ) keyword[and] identifier[len] ( identifier[bbox] )== literal[int] , literal[string]
keyword[assert] ( identifier[lat_min] keyword[is] keyword[None] ) keyword[and] ( identifier[lng_min] keyword[is] keyword[None] ) keyword[and] ( identifier[lat_max] keyword[is] keyword[None] ) keyword[and] ( identifier[lng_max] keyword[is] keyword[None] ), literal[string] literal[string]
identifier[lng_max] , identifier[lat_min] , identifier[lng_min] , identifier[lat_max] = identifier[bbox]
keyword[assert] identifier[lat_min] keyword[is] keyword[not] keyword[None] , literal[string]
keyword[assert] identifier[lng_min] keyword[is] keyword[not] keyword[None] , literal[string]
keyword[assert] identifier[lat_max] keyword[is] keyword[not] keyword[None] , literal[string]
keyword[assert] identifier[lng_max] keyword[is] keyword[not] keyword[None] , literal[string]
keyword[assert] identifier[isinstance] ( identifier[lat_min] , identifier[float] ) keyword[and] identifier[isinstance] ( identifier[lng_min] , identifier[float] ) keyword[and] identifier[isinstance] ( identifier[lat_max] , identifier[float] ) keyword[and] identifier[isinstance] ( identifier[lng_max] , identifier[float] ), literal[string]
identifier[nodes] , identifier[ways] , identifier[waynodes] = identifier[ways_in_bbox] (
identifier[lat_min] = identifier[lat_min] , identifier[lng_min] = identifier[lng_min] , identifier[lat_max] = identifier[lat_max] , identifier[lng_max] = identifier[lng_max] ,
identifier[network_type] = identifier[network_type] , identifier[timeout] = identifier[timeout] ,
identifier[memory] = identifier[memory] , identifier[max_query_area_size] = identifier[max_query_area_size] ,
identifier[custom_osm_filter] = identifier[custom_osm_filter] )
identifier[log] ( literal[string]
. identifier[format] ( identifier[len] ( identifier[nodes] ), identifier[len] ( identifier[ways] )))
identifier[edgesfinal] = identifier[node_pairs] ( identifier[nodes] , identifier[ways] , identifier[waynodes] , identifier[two_way] = identifier[two_way] )
identifier[node_ids] = identifier[sorted] ( identifier[set] ( identifier[edgesfinal] [ literal[string] ]. identifier[unique] ())
. identifier[union] ( identifier[set] ( identifier[edgesfinal] [ literal[string] ]. identifier[unique] ())))
identifier[nodesfinal] = identifier[nodes] . identifier[loc] [ identifier[node_ids] ]
identifier[nodesfinal] = identifier[nodesfinal] [[ literal[string] , literal[string] ]]
identifier[nodesfinal] . identifier[rename] ( identifier[columns] ={ literal[string] : literal[string] , literal[string] : literal[string] }, identifier[inplace] = keyword[True] )
identifier[nodesfinal] [ literal[string] ]= identifier[nodesfinal] . identifier[index]
identifier[edgesfinal] . identifier[rename] ( identifier[columns] ={ literal[string] : literal[string] , literal[string] : literal[string] }, identifier[inplace] = keyword[True] )
identifier[log] ( literal[string]
. identifier[format] ( identifier[len] ( identifier[nodesfinal] ), identifier[len] ( identifier[edgesfinal] )))
identifier[log] ( literal[string]
literal[string] . identifier[format] ( identifier[time] . identifier[time] ()- identifier[start_time] ))
keyword[return] identifier[nodesfinal] , identifier[edgesfinal]
|
def network_from_bbox(lat_min=None, lng_min=None, lat_max=None, lng_max=None, bbox=None, network_type='walk', two_way=True, timeout=180, memory=None, max_query_area_size=50 * 1000 * 50 * 1000, custom_osm_filter=None):
"""
Make a graph network from a bounding lat/lon box composed of nodes and
edges for use in Pandana street network accessibility calculations.
You may either enter a lat/long box via the four lat_min,
lng_min, lat_max, lng_max parameters or the bbox parameter as a tuple.
Parameters
----------
lat_min : float
southern latitude of bounding box, if this parameter is used the bbox
parameter should be None.
lng_min : float
eastern latitude of bounding box, if this parameter is used the bbox
parameter should be None.
lat_max : float
northern longitude of bounding box, if this parameter is used the bbox
parameter should be None.
lng_max : float
western longitude of bounding box, if this parameter is used the bbox
parameter should be None.
bbox : tuple
Bounding box formatted as a 4 element tuple:
(lng_max, lat_min, lng_min, lat_max)
example: (-122.304611,37.798933,-122.263412,37.822802)
a bbox can be extracted for an area using: the CSV format bbox from
http://boundingbox.klokantech.com/. If this parameter is used the
lat_min, lng_min, lat_max, lng_max parameters in this function
should be None.
network_type : {'walk', 'drive'}, optional
Specify the network type where value of 'walk' includes roadways where
pedestrians are allowed and pedestrian pathways and 'drive' includes
driveable roadways. To use a custom definition see the
custom_osm_filter parameter. Default is walk.
two_way : bool, optional
Whether the routes are two-way. If True, node pairs will only
occur once.
timeout : int, optional
the timeout interval for requests and to pass to Overpass API
memory : int, optional
server memory allocation size for the query, in bytes. If none,
server will use its default allocation size
max_query_area_size : float, optional
max area for any part of the geometry, in the units the geometry is
in: any polygon bigger will get divided up for multiple queries to
Overpass API (default is 50,000 * 50,000 units (ie, 50km x 50km in
area, if units are meters))
custom_osm_filter : string, optional
specify custom arguments for the way["highway"] query to OSM. Must
follow Overpass API schema. For
example to request highway ways that are service roads use:
'["highway"="service"]'
Returns
-------
nodesfinal, edgesfinal : pandas.DataFrame
"""
start_time = time.time()
if bbox is not None:
assert isinstance(bbox, tuple) and len(bbox) == 4, 'bbox must be a 4 element tuple'
assert lat_min is None and lng_min is None and (lat_max is None) and (lng_max is None), 'lat_min, lng_min, lat_max and lng_max must be None if you are using bbox'
(lng_max, lat_min, lng_min, lat_max) = bbox # depends on [control=['if'], data=['bbox']]
assert lat_min is not None, 'lat_min cannot be None'
assert lng_min is not None, 'lng_min cannot be None'
assert lat_max is not None, 'lat_max cannot be None'
assert lng_max is not None, 'lng_max cannot be None'
assert isinstance(lat_min, float) and isinstance(lng_min, float) and isinstance(lat_max, float) and isinstance(lng_max, float), 'lat_min, lng_min, lat_max, and lng_max must be floats'
(nodes, ways, waynodes) = ways_in_bbox(lat_min=lat_min, lng_min=lng_min, lat_max=lat_max, lng_max=lng_max, network_type=network_type, timeout=timeout, memory=memory, max_query_area_size=max_query_area_size, custom_osm_filter=custom_osm_filter)
log('Returning OSM data with {:,} nodes and {:,} ways...'.format(len(nodes), len(ways)))
edgesfinal = node_pairs(nodes, ways, waynodes, two_way=two_way)
# make the unique set of nodes that ended up in pairs
node_ids = sorted(set(edgesfinal['from_id'].unique()).union(set(edgesfinal['to_id'].unique())))
nodesfinal = nodes.loc[node_ids]
nodesfinal = nodesfinal[['lon', 'lat']]
nodesfinal.rename(columns={'lon': 'x', 'lat': 'y'}, inplace=True)
nodesfinal['id'] = nodesfinal.index
edgesfinal.rename(columns={'from_id': 'from', 'to_id': 'to'}, inplace=True)
log('Returning processed graph with {:,} nodes and {:,} edges...'.format(len(nodesfinal), len(edgesfinal)))
log('Completed OSM data download and Pandana node and edge table creation in {:,.2f} seconds'.format(time.time() - start_time))
return (nodesfinal, edgesfinal)
|
def namedb_get_name(cur, name, current_block, include_expired=False, include_history=True, only_registered=True):
    """
    Get a name and all of its history.  Note: will return a revoked name.

    Return the name record (plus its history, if requested) on success.
    Return None if the name doesn't exist, or is expired (NOTE: will return a revoked name)
    """
    if include_expired:
        # Expiry is irrelevant: match on the name alone.
        select_query = "SELECT * FROM name_records WHERE name = ?;"
        args = (name,)
    else:
        # Join against namespaces so expired names can be filtered out.
        unexpired_fragment, unexpired_args = namedb_select_where_unexpired_names(current_block, only_registered=only_registered)
        select_query = "SELECT name_records.* FROM name_records JOIN namespaces ON name_records.namespace_id = namespaces.namespace_id " + \
                       "WHERE name = ? AND " + unexpired_fragment + ";"
        args = (name,) + unexpired_args

    row = namedb_query_execute(cur, select_query, args).fetchone()
    if row is None:
        # no such name
        return None

    name_rec = {}
    name_rec.update(row)

    if include_history:
        name_rec['history'] = namedb_get_history(cur, name)

    return name_rec
|
def function[namedb_get_name, parameter[cur, name, current_block, include_expired, include_history, only_registered]]:
constant[
Get a name and all of its history. Note: will return a revoked name
Return the name + history on success
Return None if the name doesn't exist, or is expired (NOTE: will return a revoked name)
]
if <ast.UnaryOp object at 0x7da20c6a8ac0> begin[:]
<ast.Tuple object at 0x7da20c6a92a0> assign[=] call[name[namedb_select_where_unexpired_names], parameter[name[current_block]]]
variable[select_query] assign[=] binary_operation[binary_operation[binary_operation[constant[SELECT name_records.* FROM name_records JOIN namespaces ON name_records.namespace_id = namespaces.namespace_id ] + constant[WHERE name = ? AND ]] + name[unexpired_fragment]] + constant[;]]
variable[args] assign[=] binary_operation[tuple[[<ast.Name object at 0x7da20c6ab970>]] + name[unexpired_args]]
variable[name_rows] assign[=] call[name[namedb_query_execute], parameter[name[cur], name[select_query], name[args]]]
variable[name_row] assign[=] call[name[name_rows].fetchone, parameter[]]
if compare[name[name_row] is constant[None]] begin[:]
return[constant[None]]
variable[name_rec] assign[=] dictionary[[], []]
call[name[name_rec].update, parameter[name[name_row]]]
if name[include_history] begin[:]
variable[name_history] assign[=] call[name[namedb_get_history], parameter[name[cur], name[name]]]
call[name[name_rec]][constant[history]] assign[=] name[name_history]
return[name[name_rec]]
|
keyword[def] identifier[namedb_get_name] ( identifier[cur] , identifier[name] , identifier[current_block] , identifier[include_expired] = keyword[False] , identifier[include_history] = keyword[True] , identifier[only_registered] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[include_expired] :
identifier[unexpired_fragment] , identifier[unexpired_args] = identifier[namedb_select_where_unexpired_names] ( identifier[current_block] , identifier[only_registered] = identifier[only_registered] )
identifier[select_query] = literal[string] + literal[string] + identifier[unexpired_fragment] + literal[string]
identifier[args] =( identifier[name] ,)+ identifier[unexpired_args]
keyword[else] :
identifier[select_query] = literal[string]
identifier[args] =( identifier[name] ,)
identifier[name_rows] = identifier[namedb_query_execute] ( identifier[cur] , identifier[select_query] , identifier[args] )
identifier[name_row] = identifier[name_rows] . identifier[fetchone] ()
keyword[if] identifier[name_row] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[name_rec] ={}
identifier[name_rec] . identifier[update] ( identifier[name_row] )
keyword[if] identifier[include_history] :
identifier[name_history] = identifier[namedb_get_history] ( identifier[cur] , identifier[name] )
identifier[name_rec] [ literal[string] ]= identifier[name_history]
keyword[return] identifier[name_rec]
|
def namedb_get_name(cur, name, current_block, include_expired=False, include_history=True, only_registered=True):
"""
Get a name and all of its history. Note: will return a revoked name
Return the name + history on success
Return None if the name doesn't exist, or is expired (NOTE: will return a revoked name)
"""
if not include_expired:
(unexpired_fragment, unexpired_args) = namedb_select_where_unexpired_names(current_block, only_registered=only_registered)
select_query = 'SELECT name_records.* FROM name_records JOIN namespaces ON name_records.namespace_id = namespaces.namespace_id ' + 'WHERE name = ? AND ' + unexpired_fragment + ';'
args = (name,) + unexpired_args # depends on [control=['if'], data=[]]
else:
select_query = 'SELECT * FROM name_records WHERE name = ?;'
args = (name,)
# log.debug(namedb_format_query(select_query, args))
name_rows = namedb_query_execute(cur, select_query, args)
name_row = name_rows.fetchone()
if name_row is None:
# no such name
return None # depends on [control=['if'], data=[]]
name_rec = {}
name_rec.update(name_row)
if include_history:
name_history = namedb_get_history(cur, name)
name_rec['history'] = name_history # depends on [control=['if'], data=[]]
return name_rec
|
def cybox_valueset_fact_handler(self, enrichment, fact, attr_info, add_fact_kargs):
    """
    Handler for dealing with 'value_set' values.

    Unfortunately, CybOX et al. sometimes use comma-separated
    value lists rather than an XML structure that can contain
    several values.

    This handler is called for elements concerning a value-set
    such as the following example::

         <URIObj:Value condition="IsInSet"
               value_set="www.sample1.com/index.html, sample2.com/login.html, dev.sample3.com/index/kb.html"
               datatype="AnyURI"/>

    :param enrichment: enrichment context (unused here)
    :param fact: fact dict; only ``fact['node_id']`` is read
    :param attr_info: mapping with a ``'value_set'`` dict keyed by node id
    :param add_fact_kargs: output kwargs dict; ``'values'`` is set here
    :return: True (the fact was handled)
    """
    raw_value_set = attr_info['value_set'][fact['node_id']]
    # Split the comma-separated set and strip whitespace around each entry.
    # Use a list comprehension rather than map(): on Python 3, map() returns
    # a one-shot lazy iterator, so downstream consumers that iterate
    # add_fact_kargs['values'] more than once (or index it) would break.
    add_fact_kargs['values'] = [value.strip() for value in raw_value_set.split(",")]
    return True
|
def function[cybox_valueset_fact_handler, parameter[self, enrichment, fact, attr_info, add_fact_kargs]]:
constant[
Handler for dealing with 'value_set' values.
Unfortunately, CybOX et al. sometimes use comma-separated
value lists rather than an XML structure that can contain
several values.
This handler is called for elements concerning a value-set
such as the following example::
<URIObj:Value condition="IsInSet"
value_set="www.sample1.com/index.html, sample2.com/login.html, dev.sample3.com/index/kb.html"
datatype="AnyURI"/>
]
variable[value_list] assign[=] call[call[call[name[attr_info]][constant[value_set]]][call[name[fact]][constant[node_id]]].split, parameter[constant[,]]]
variable[value_list] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da1b0a83580>, name[value_list]]]
call[name[add_fact_kargs]][constant[values]] assign[=] name[value_list]
return[constant[True]]
|
keyword[def] identifier[cybox_valueset_fact_handler] ( identifier[self] , identifier[enrichment] , identifier[fact] , identifier[attr_info] , identifier[add_fact_kargs] ):
literal[string]
identifier[value_list] = identifier[attr_info] [ literal[string] ][ identifier[fact] [ literal[string] ]]. identifier[split] ( literal[string] )
identifier[value_list] = identifier[map] ( keyword[lambda] identifier[x] : identifier[x] . identifier[strip] (), identifier[value_list] )
identifier[add_fact_kargs] [ literal[string] ]= identifier[value_list]
keyword[return] keyword[True]
|
def cybox_valueset_fact_handler(self, enrichment, fact, attr_info, add_fact_kargs):
"""
Handler for dealing with 'value_set' values.
Unfortunately, CybOX et al. sometimes use comma-separated
value lists rather than an XML structure that can contain
several values.
This handler is called for elements concerning a value-set
such as the following example::
<URIObj:Value condition="IsInSet"
value_set="www.sample1.com/index.html, sample2.com/login.html, dev.sample3.com/index/kb.html"
datatype="AnyURI"/>
"""
value_list = attr_info['value_set'][fact['node_id']].split(',')
value_list = map(lambda x: x.strip(), value_list)
add_fact_kargs['values'] = value_list
return True
|
def do_filter(self, arg):
    """Sets the filter for the test cases to include in the plot/table by name. Only those
    test cases that include this text are included in plots, tables etc."""
    if arg == "list":
        # Show every active filter; "*" is the catch-all default.
        msg.info("TEST CASE FILTERS")
        for current in self.curargs["tfilter"]:
            label = "  * (default, matches all)" if current == "*" else "  " + current
            msg.info(label)
    elif arg not in self.curargs["tfilter"]:
        # Register the new filter, then display the updated list.
        self.curargs["tfilter"].append(arg)
        self.do_filter("list")
|
def function[do_filter, parameter[self, arg]]:
constant[Sets the filter for the test cases to include in the plot/table by name. Only those
test cases that include this text are included in plots, tables etc.]
if compare[name[arg] equal[==] constant[list]] begin[:]
call[name[msg].info, parameter[constant[TEST CASE FILTERS]]]
for taget[name[f]] in starred[call[name[self].curargs][constant[tfilter]]] begin[:]
if compare[name[f] equal[==] constant[*]] begin[:]
call[name[msg].info, parameter[constant[ * (default, matches all)]]]
|
keyword[def] identifier[do_filter] ( identifier[self] , identifier[arg] ):
literal[string]
keyword[if] identifier[arg] == literal[string] :
identifier[msg] . identifier[info] ( literal[string] )
keyword[for] identifier[f] keyword[in] identifier[self] . identifier[curargs] [ literal[string] ]:
keyword[if] identifier[f] == literal[string] :
identifier[msg] . identifier[info] ( literal[string] )
keyword[else] :
identifier[msg] . identifier[info] ( literal[string] + identifier[f] )
keyword[elif] identifier[arg] keyword[not] keyword[in] identifier[self] . identifier[curargs] [ literal[string] ]:
identifier[self] . identifier[curargs] [ literal[string] ]. identifier[append] ( identifier[arg] )
identifier[self] . identifier[do_filter] ( literal[string] )
|
def do_filter(self, arg):
"""Sets the filter for the test cases to include in the plot/table by name. Only those
test cases that include this text are included in plots, tables etc."""
if arg == 'list':
msg.info('TEST CASE FILTERS')
for f in self.curargs['tfilter']:
if f == '*':
msg.info(' * (default, matches all)') # depends on [control=['if'], data=[]]
else:
msg.info(' ' + f) # depends on [control=['for'], data=['f']] # depends on [control=['if'], data=[]]
elif arg not in self.curargs['tfilter']:
self.curargs['tfilter'].append(arg)
self.do_filter('list') # depends on [control=['if'], data=['arg']]
|
def CreateChatWith(self, *Usernames):
    """Creates a chat with one or more users.

    :Parameters:
      Usernames : str
        One or more Skypenames of the users.

    :return: A chat object
    :rtype: `Chat`

    :see: `Chat.AddMembers`
    """
    # Issue the CHAT CREATE command and wrap the returned chat id.
    command = 'CHAT CREATE %s' % ', '.join(Usernames)
    reply = self._DoCommand(command)
    chat_id = chop(reply, 2)[1]
    return Chat(self, chat_id)
|
def function[CreateChatWith, parameter[self]]:
constant[Creates a chat with one or more users.
:Parameters:
Usernames : str
One or more Skypenames of the users.
:return: A chat object
:rtype: `Chat`
:see: `Chat.AddMembers`
]
return[call[name[Chat], parameter[name[self], call[call[name[chop], parameter[call[name[self]._DoCommand, parameter[binary_operation[constant[CHAT CREATE %s] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[name[Usernames]]]]]], constant[2]]]][constant[1]]]]]
|
keyword[def] identifier[CreateChatWith] ( identifier[self] ,* identifier[Usernames] ):
literal[string]
keyword[return] identifier[Chat] ( identifier[self] , identifier[chop] ( identifier[self] . identifier[_DoCommand] ( literal[string] % literal[string] . identifier[join] ( identifier[Usernames] )), literal[int] )[ literal[int] ])
|
def CreateChatWith(self, *Usernames):
"""Creates a chat with one or more users.
:Parameters:
Usernames : str
One or more Skypenames of the users.
:return: A chat object
:rtype: `Chat`
:see: `Chat.AddMembers`
"""
return Chat(self, chop(self._DoCommand('CHAT CREATE %s' % ', '.join(Usernames)), 2)[1])
|
def _report_command(self, cmd, procs=None):
"""
Writes a command to both stdout and to the commands log file
(self.pipeline_commands_file).
:param str cmd: command to report
:param str | list[str] procs: process numbers for processes in the command
"""
if isinstance(procs, list):
procs = ",".join(map(str,procs))
if procs:
line = "\n> `{cmd}` ({procs})\n".format(cmd=str(cmd), procs=procs)
else:
line = "\n> `{cmd}`\n".format(cmd=str(cmd))
print(line)
with open(self.pipeline_commands_file, "a") as myfile:
myfile.write(line + "\n\n")
|
def function[_report_command, parameter[self, cmd, procs]]:
constant[
Writes a command to both stdout and to the commands log file
(self.pipeline_commands_file).
:param str cmd: command to report
:param str | list[str] procs: process numbers for processes in the command
]
if call[name[isinstance], parameter[name[procs], name[list]]] begin[:]
variable[procs] assign[=] call[constant[,].join, parameter[call[name[map], parameter[name[str], name[procs]]]]]
if name[procs] begin[:]
variable[line] assign[=] call[constant[
> `{cmd}` ({procs})
].format, parameter[]]
call[name[print], parameter[name[line]]]
with call[name[open], parameter[name[self].pipeline_commands_file, constant[a]]] begin[:]
call[name[myfile].write, parameter[binary_operation[name[line] + constant[
]]]]
|
keyword[def] identifier[_report_command] ( identifier[self] , identifier[cmd] , identifier[procs] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[procs] , identifier[list] ):
identifier[procs] = literal[string] . identifier[join] ( identifier[map] ( identifier[str] , identifier[procs] ))
keyword[if] identifier[procs] :
identifier[line] = literal[string] . identifier[format] ( identifier[cmd] = identifier[str] ( identifier[cmd] ), identifier[procs] = identifier[procs] )
keyword[else] :
identifier[line] = literal[string] . identifier[format] ( identifier[cmd] = identifier[str] ( identifier[cmd] ))
identifier[print] ( identifier[line] )
keyword[with] identifier[open] ( identifier[self] . identifier[pipeline_commands_file] , literal[string] ) keyword[as] identifier[myfile] :
identifier[myfile] . identifier[write] ( identifier[line] + literal[string] )
|
def _report_command(self, cmd, procs=None):
"""
Writes a command to both stdout and to the commands log file
(self.pipeline_commands_file).
:param str cmd: command to report
:param str | list[str] procs: process numbers for processes in the command
"""
if isinstance(procs, list):
procs = ','.join(map(str, procs)) # depends on [control=['if'], data=[]]
if procs:
line = '\n> `{cmd}` ({procs})\n'.format(cmd=str(cmd), procs=procs) # depends on [control=['if'], data=[]]
else:
line = '\n> `{cmd}`\n'.format(cmd=str(cmd))
print(line)
with open(self.pipeline_commands_file, 'a') as myfile:
myfile.write(line + '\n\n') # depends on [control=['with'], data=['myfile']]
|
def deduplicate(args):
    """
    %prog deduplicate fastafile

    Wraps `cd-hit-est` to remove duplicate sequences.
    """
    p = OptionParser(deduplicate.__doc__)
    # Defaults: 96% identity threshold, no alignment-coverage requirement.
    p.set_align(pctid=96, pctcov=0)
    p.add_option("--fast", default=False, action="store_true",
                 help="Place sequence in the first cluster")
    p.add_option("--consensus", default=False, action="store_true",
                 help="Compute consensus sequences")
    p.add_option("--reads", default=False, action="store_true",
                 help="Use `cd-hit-454` to deduplicate [default: %default]")
    p.add_option("--samestrand", default=False, action="store_true",
                 help="Enforce same strand alignment")
    p.set_home("cdhit")
    p.set_cpus()
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    fastafile, = args
    # cd-hit expects identity as a fraction (0-1), not a percentage.
    identity = opts.pctid / 100.
    # Normalize the FASTA via seqtk; the qual file is not used here.
    fastafile, qualfile = fasta([fastafile, "--seqtk"])

    # cd-hit-454 is tuned for sequencing reads; cd-hit-est is the general tool.
    ocmd = "cd-hit-454" if opts.reads else "cd-hit-est"

    cmd = op.join(opts.cdhit_home, ocmd)
    cmd += " -c {0}".format(identity)
    if ocmd == "cd-hit-est":
        cmd += " -d 0" # include complete defline
        if opts.samestrand:
            # -r 0: only align in the forward orientation
            cmd += " -r 0"
    if not opts.fast:
        # -g 1: slower "accurate" mode; sequence goes to the best cluster,
        # not just the first one that matches
        cmd += " -g 1"
    if opts.pctcov != 0:
        # require this coverage fraction on both the longer (-aL) and the
        # shorter (-aS) sequence of each alignment
        cmd += " -aL {0} -aS {0}".format(opts.pctcov / 100.)

    dd = fastafile + ".P{0}.cdhit".format(opts.pctid)
    clstr = dd + ".clstr"

    # -M 0: no memory limit; -T: thread count
    cmd += " -M 0 -T {0} -i {1} -o {2}".format(opts.cpus, fastafile, dd)
    # Only rerun cd-hit when outputs are stale relative to the input.
    if need_update(fastafile, (dd, clstr)):
        sh(cmd)

    if opts.consensus:
        # Optionally derive one consensus sequence per cluster.
        cons = dd + ".consensus"
        cmd = op.join(opts.cdhit_home, "cdhit-cluster-consensus")
        cmd += " clustfile={0} fastafile={1} output={2} maxlen=1".\
                format(clstr, fastafile, cons)
        if need_update((clstr, fastafile), cons):
            sh(cmd)

    return dd
|
def function[deduplicate, parameter[args]]:
constant[
%prog deduplicate fastafile
Wraps `cd-hit-est` to remove duplicate sequences.
]
variable[p] assign[=] call[name[OptionParser], parameter[name[deduplicate].__doc__]]
call[name[p].set_align, parameter[]]
call[name[p].add_option, parameter[constant[--fast]]]
call[name[p].add_option, parameter[constant[--consensus]]]
call[name[p].add_option, parameter[constant[--reads]]]
call[name[p].add_option, parameter[constant[--samestrand]]]
call[name[p].set_home, parameter[constant[cdhit]]]
call[name[p].set_cpus, parameter[]]
<ast.Tuple object at 0x7da1b2345f60> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[1]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b2347c10>]]
<ast.Tuple object at 0x7da1b2346950> assign[=] name[args]
variable[identity] assign[=] binary_operation[name[opts].pctid / constant[100.0]]
<ast.Tuple object at 0x7da1b23463b0> assign[=] call[name[fasta], parameter[list[[<ast.Name object at 0x7da1b23447c0>, <ast.Constant object at 0x7da1b2345c90>]]]]
variable[ocmd] assign[=] <ast.IfExp object at 0x7da1b2344970>
variable[cmd] assign[=] call[name[op].join, parameter[name[opts].cdhit_home, name[ocmd]]]
<ast.AugAssign object at 0x7da1b2344b80>
if compare[name[ocmd] equal[==] constant[cd-hit-est]] begin[:]
<ast.AugAssign object at 0x7da1b2344ee0>
if name[opts].samestrand begin[:]
<ast.AugAssign object at 0x7da1b2347550>
if <ast.UnaryOp object at 0x7da1b2347190> begin[:]
<ast.AugAssign object at 0x7da1b2347970>
if compare[name[opts].pctcov not_equal[!=] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b2344b20>
variable[dd] assign[=] binary_operation[name[fastafile] + call[constant[.P{0}.cdhit].format, parameter[name[opts].pctid]]]
variable[clstr] assign[=] binary_operation[name[dd] + constant[.clstr]]
<ast.AugAssign object at 0x7da1b23448b0>
if call[name[need_update], parameter[name[fastafile], tuple[[<ast.Name object at 0x7da1b2347070>, <ast.Name object at 0x7da1b23458a0>]]]] begin[:]
call[name[sh], parameter[name[cmd]]]
if name[opts].consensus begin[:]
variable[cons] assign[=] binary_operation[name[dd] + constant[.consensus]]
variable[cmd] assign[=] call[name[op].join, parameter[name[opts].cdhit_home, constant[cdhit-cluster-consensus]]]
<ast.AugAssign object at 0x7da1b2344400>
if call[name[need_update], parameter[tuple[[<ast.Name object at 0x7da2041dae60>, <ast.Name object at 0x7da2041da7d0>]], name[cons]]] begin[:]
call[name[sh], parameter[name[cmd]]]
return[name[dd]]
|
keyword[def] identifier[deduplicate] ( identifier[args] ):
literal[string]
identifier[p] = identifier[OptionParser] ( identifier[deduplicate] . identifier[__doc__] )
identifier[p] . identifier[set_align] ( identifier[pctid] = literal[int] , identifier[pctcov] = literal[int] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[set_home] ( literal[string] )
identifier[p] . identifier[set_cpus] ()
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[fastafile] ,= identifier[args]
identifier[identity] = identifier[opts] . identifier[pctid] / literal[int]
identifier[fastafile] , identifier[qualfile] = identifier[fasta] ([ identifier[fastafile] , literal[string] ])
identifier[ocmd] = literal[string] keyword[if] identifier[opts] . identifier[reads] keyword[else] literal[string]
identifier[cmd] = identifier[op] . identifier[join] ( identifier[opts] . identifier[cdhit_home] , identifier[ocmd] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[identity] )
keyword[if] identifier[ocmd] == literal[string] :
identifier[cmd] += literal[string]
keyword[if] identifier[opts] . identifier[samestrand] :
identifier[cmd] += literal[string]
keyword[if] keyword[not] identifier[opts] . identifier[fast] :
identifier[cmd] += literal[string]
keyword[if] identifier[opts] . identifier[pctcov] != literal[int] :
identifier[cmd] += literal[string] . identifier[format] ( identifier[opts] . identifier[pctcov] / literal[int] )
identifier[dd] = identifier[fastafile] + literal[string] . identifier[format] ( identifier[opts] . identifier[pctid] )
identifier[clstr] = identifier[dd] + literal[string]
identifier[cmd] += literal[string] . identifier[format] ( identifier[opts] . identifier[cpus] , identifier[fastafile] , identifier[dd] )
keyword[if] identifier[need_update] ( identifier[fastafile] ,( identifier[dd] , identifier[clstr] )):
identifier[sh] ( identifier[cmd] )
keyword[if] identifier[opts] . identifier[consensus] :
identifier[cons] = identifier[dd] + literal[string]
identifier[cmd] = identifier[op] . identifier[join] ( identifier[opts] . identifier[cdhit_home] , literal[string] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[clstr] , identifier[fastafile] , identifier[cons] )
keyword[if] identifier[need_update] (( identifier[clstr] , identifier[fastafile] ), identifier[cons] ):
identifier[sh] ( identifier[cmd] )
keyword[return] identifier[dd]
|
def deduplicate(args):
"""
%prog deduplicate fastafile
Wraps `cd-hit-est` to remove duplicate sequences.
"""
p = OptionParser(deduplicate.__doc__)
p.set_align(pctid=96, pctcov=0)
p.add_option('--fast', default=False, action='store_true', help='Place sequence in the first cluster')
p.add_option('--consensus', default=False, action='store_true', help='Compute consensus sequences')
p.add_option('--reads', default=False, action='store_true', help='Use `cd-hit-454` to deduplicate [default: %default]')
p.add_option('--samestrand', default=False, action='store_true', help='Enforce same strand alignment')
p.set_home('cdhit')
p.set_cpus()
(opts, args) = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(fastafile,) = args
identity = opts.pctid / 100.0
(fastafile, qualfile) = fasta([fastafile, '--seqtk'])
ocmd = 'cd-hit-454' if opts.reads else 'cd-hit-est'
cmd = op.join(opts.cdhit_home, ocmd)
cmd += ' -c {0}'.format(identity)
if ocmd == 'cd-hit-est':
cmd += ' -d 0' # include complete defline
if opts.samestrand:
cmd += ' -r 0' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not opts.fast:
cmd += ' -g 1' # depends on [control=['if'], data=[]]
if opts.pctcov != 0:
cmd += ' -aL {0} -aS {0}'.format(opts.pctcov / 100.0) # depends on [control=['if'], data=[]]
dd = fastafile + '.P{0}.cdhit'.format(opts.pctid)
clstr = dd + '.clstr'
cmd += ' -M 0 -T {0} -i {1} -o {2}'.format(opts.cpus, fastafile, dd)
if need_update(fastafile, (dd, clstr)):
sh(cmd) # depends on [control=['if'], data=[]]
if opts.consensus:
cons = dd + '.consensus'
cmd = op.join(opts.cdhit_home, 'cdhit-cluster-consensus')
cmd += ' clustfile={0} fastafile={1} output={2} maxlen=1'.format(clstr, fastafile, cons)
if need_update((clstr, fastafile), cons):
sh(cmd) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return dd
|
def is60(msg):
    """Check if a message is likely to be BDS code 6,0

    Args:
        msg (String): 28 bytes hexadecimal message string

    Returns:
        bool: True or False
    """
    if allzeros(msg):
        return False

    d = hex2bin(data(msg))

    # Status bits sit at positions 1, 13, 24, 35, 46, each guarding the
    # field spanning the following (start, end) bit range.
    status_fields = (
        (1, 2, 12),
        (13, 14, 23),
        (24, 25, 34),
        (35, 36, 45),
        (46, 47, 56),
    )
    for status_bit, field_start, field_end in status_fields:
        if wrongstatus(d, status_bit, field_start, field_end):
            return False

    # Reject decodings that are physically implausible for this BDS code.
    ias = ias60(msg)
    if ias is not None and ias > 500:
        return False

    mach = mach60(msg)
    if mach is not None and mach > 1:
        return False

    # Both barometric and inertial vertical rates must be within bounds.
    for vertical_rate in (vr60baro(msg), vr60ins(msg)):
        if vertical_rate is not None and abs(vertical_rate) > 6000:
            return False

    return True
|
def function[is60, parameter[msg]]:
constant[Check if a message is likely to be BDS code 6,0
Args:
msg (String): 28 bytes hexadecimal message string
Returns:
bool: True or False
]
if call[name[allzeros], parameter[name[msg]]] begin[:]
return[constant[False]]
variable[d] assign[=] call[name[hex2bin], parameter[call[name[data], parameter[name[msg]]]]]
if call[name[wrongstatus], parameter[name[d], constant[1], constant[2], constant[12]]] begin[:]
return[constant[False]]
if call[name[wrongstatus], parameter[name[d], constant[13], constant[14], constant[23]]] begin[:]
return[constant[False]]
if call[name[wrongstatus], parameter[name[d], constant[24], constant[25], constant[34]]] begin[:]
return[constant[False]]
if call[name[wrongstatus], parameter[name[d], constant[35], constant[36], constant[45]]] begin[:]
return[constant[False]]
if call[name[wrongstatus], parameter[name[d], constant[46], constant[47], constant[56]]] begin[:]
return[constant[False]]
variable[ias] assign[=] call[name[ias60], parameter[name[msg]]]
if <ast.BoolOp object at 0x7da18dc04580> begin[:]
return[constant[False]]
variable[mach] assign[=] call[name[mach60], parameter[name[msg]]]
if <ast.BoolOp object at 0x7da18dc05c00> begin[:]
return[constant[False]]
variable[vr_baro] assign[=] call[name[vr60baro], parameter[name[msg]]]
if <ast.BoolOp object at 0x7da18ede76a0> begin[:]
return[constant[False]]
variable[vr_ins] assign[=] call[name[vr60ins], parameter[name[msg]]]
if <ast.BoolOp object at 0x7da18ede66b0> begin[:]
return[constant[False]]
return[constant[True]]
|
keyword[def] identifier[is60] ( identifier[msg] ):
literal[string]
keyword[if] identifier[allzeros] ( identifier[msg] ):
keyword[return] keyword[False]
identifier[d] = identifier[hex2bin] ( identifier[data] ( identifier[msg] ))
keyword[if] identifier[wrongstatus] ( identifier[d] , literal[int] , literal[int] , literal[int] ):
keyword[return] keyword[False]
keyword[if] identifier[wrongstatus] ( identifier[d] , literal[int] , literal[int] , literal[int] ):
keyword[return] keyword[False]
keyword[if] identifier[wrongstatus] ( identifier[d] , literal[int] , literal[int] , literal[int] ):
keyword[return] keyword[False]
keyword[if] identifier[wrongstatus] ( identifier[d] , literal[int] , literal[int] , literal[int] ):
keyword[return] keyword[False]
keyword[if] identifier[wrongstatus] ( identifier[d] , literal[int] , literal[int] , literal[int] ):
keyword[return] keyword[False]
identifier[ias] = identifier[ias60] ( identifier[msg] )
keyword[if] identifier[ias] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ias] > literal[int] :
keyword[return] keyword[False]
identifier[mach] = identifier[mach60] ( identifier[msg] )
keyword[if] identifier[mach] keyword[is] keyword[not] keyword[None] keyword[and] identifier[mach] > literal[int] :
keyword[return] keyword[False]
identifier[vr_baro] = identifier[vr60baro] ( identifier[msg] )
keyword[if] identifier[vr_baro] keyword[is] keyword[not] keyword[None] keyword[and] identifier[abs] ( identifier[vr_baro] )> literal[int] :
keyword[return] keyword[False]
identifier[vr_ins] = identifier[vr60ins] ( identifier[msg] )
keyword[if] identifier[vr_ins] keyword[is] keyword[not] keyword[None] keyword[and] identifier[abs] ( identifier[vr_ins] )> literal[int] :
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def is60(msg):
"""Check if a message is likely to be BDS code 6,0
Args:
msg (String): 28 bytes hexadecimal message string
Returns:
bool: True or False
"""
if allzeros(msg):
return False # depends on [control=['if'], data=[]]
d = hex2bin(data(msg))
# status bit 1, 13, 24, 35, 46
if wrongstatus(d, 1, 2, 12):
return False # depends on [control=['if'], data=[]]
if wrongstatus(d, 13, 14, 23):
return False # depends on [control=['if'], data=[]]
if wrongstatus(d, 24, 25, 34):
return False # depends on [control=['if'], data=[]]
if wrongstatus(d, 35, 36, 45):
return False # depends on [control=['if'], data=[]]
if wrongstatus(d, 46, 47, 56):
return False # depends on [control=['if'], data=[]]
ias = ias60(msg)
if ias is not None and ias > 500:
return False # depends on [control=['if'], data=[]]
mach = mach60(msg)
if mach is not None and mach > 1:
return False # depends on [control=['if'], data=[]]
vr_baro = vr60baro(msg)
if vr_baro is not None and abs(vr_baro) > 6000:
return False # depends on [control=['if'], data=[]]
vr_ins = vr60ins(msg)
if vr_ins is not None and abs(vr_ins) > 6000:
return False # depends on [control=['if'], data=[]]
return True
|
def node_to_nodal_planes(node):
    """
    Parses the nodal plane distribution to a PMF
    """
    if not len(node):
        # No child planes at all
        return None
    pmf_data = []
    for plane in node.nodes:
        attribs = plane.attrib
        # If any attribute of any plane is missing/empty, abandon the whole
        # distribution rather than return a partial one.
        if not all(attribs[key] for key in attribs):
            return None
        nodal_plane = NodalPlane(float(attribs["strike"]),
                                 float(attribs["dip"]),
                                 float(attribs["rake"]))
        pmf_data.append((float(attribs["probability"]), nodal_plane))
    return PMF(pmf_data)
|
def function[node_to_nodal_planes, parameter[node]]:
constant[
Parses the nodal plane distribution to a PMF
]
if <ast.UnaryOp object at 0x7da18f811b40> begin[:]
return[constant[None]]
variable[npd_pmf] assign[=] list[[]]
for taget[name[plane]] in starred[name[node].nodes] begin[:]
if <ast.UnaryOp object at 0x7da18f8113c0> begin[:]
return[constant[None]]
variable[npd] assign[=] call[name[NodalPlane], parameter[call[name[float], parameter[call[name[plane].attrib][constant[strike]]]], call[name[float], parameter[call[name[plane].attrib][constant[dip]]]], call[name[float], parameter[call[name[plane].attrib][constant[rake]]]]]]
call[name[npd_pmf].append, parameter[tuple[[<ast.Call object at 0x7da18f813550>, <ast.Name object at 0x7da18f8101c0>]]]]
return[call[name[PMF], parameter[name[npd_pmf]]]]
|
keyword[def] identifier[node_to_nodal_planes] ( identifier[node] ):
literal[string]
keyword[if] keyword[not] identifier[len] ( identifier[node] ):
keyword[return] keyword[None]
identifier[npd_pmf] =[]
keyword[for] identifier[plane] keyword[in] identifier[node] . identifier[nodes] :
keyword[if] keyword[not] identifier[all] ( identifier[plane] . identifier[attrib] [ identifier[key] ] keyword[for] identifier[key] keyword[in] identifier[plane] . identifier[attrib] ):
keyword[return] keyword[None]
identifier[npd] = identifier[NodalPlane] ( identifier[float] ( identifier[plane] . identifier[attrib] [ literal[string] ]),
identifier[float] ( identifier[plane] . identifier[attrib] [ literal[string] ]),
identifier[float] ( identifier[plane] . identifier[attrib] [ literal[string] ]))
identifier[npd_pmf] . identifier[append] (( identifier[float] ( identifier[plane] . identifier[attrib] [ literal[string] ]), identifier[npd] ))
keyword[return] identifier[PMF] ( identifier[npd_pmf] )
|
def node_to_nodal_planes(node):
"""
Parses the nodal plane distribution to a PMF
"""
if not len(node):
return None # depends on [control=['if'], data=[]]
npd_pmf = []
for plane in node.nodes:
if not all((plane.attrib[key] for key in plane.attrib)):
# One plane fails - return None
return None # depends on [control=['if'], data=[]]
npd = NodalPlane(float(plane.attrib['strike']), float(plane.attrib['dip']), float(plane.attrib['rake']))
npd_pmf.append((float(plane.attrib['probability']), npd)) # depends on [control=['for'], data=['plane']]
return PMF(npd_pmf)
|
def read_array(fd, n_row, n_col, dtype):
    """Read a NumPy array of shape `(n_row, n_col)` from the given file
    object and cast it to type `dtype`.

    If `n_col` is None, determine the number of columns automatically
    by peeking at the next line of the file.
    """
    if n_col is None:
        # Count the whitespace-separated fields on the next line, then
        # rewind so the actual read starts from the same position.
        pos = fd.tell()
        n_col = len(fd.readline().split())
        fd.seek(pos)

    n_items = n_row * n_col
    data = nm.fromfile(fd, sep=' ', count=n_items)
    if data.shape[0] < n_items:
        # Fewer values were available than requested.
        raise ValueError('(%d, %d) array reading failed!' % (n_row, n_col))

    data = nm.asarray(data, dtype=dtype)
    data.shape = (n_row, n_col)
    return data
|
def function[read_array, parameter[fd, n_row, n_col, dtype]]:
constant[
Read a NumPy array of shape `(n_row, n_col)` from the given file
object and cast it to type `dtype`.
If `n_col` is None, determine the number of columns automatically.
]
if compare[name[n_col] is constant[None]] begin[:]
variable[idx] assign[=] call[name[fd].tell, parameter[]]
variable[row] assign[=] call[call[name[fd].readline, parameter[]].split, parameter[]]
call[name[fd].seek, parameter[name[idx]]]
variable[n_col] assign[=] call[name[len], parameter[name[row]]]
variable[count] assign[=] binary_operation[name[n_row] * name[n_col]]
variable[val] assign[=] call[name[nm].fromfile, parameter[name[fd]]]
if compare[call[name[val].shape][constant[0]] less[<] name[count]] begin[:]
<ast.Raise object at 0x7da18f723670>
variable[val] assign[=] call[name[nm].asarray, parameter[name[val]]]
name[val].shape assign[=] tuple[[<ast.Name object at 0x7da18f722e60>, <ast.Name object at 0x7da18f723580>]]
return[name[val]]
|
keyword[def] identifier[read_array] ( identifier[fd] , identifier[n_row] , identifier[n_col] , identifier[dtype] ):
literal[string]
keyword[if] identifier[n_col] keyword[is] keyword[None] :
identifier[idx] = identifier[fd] . identifier[tell] ()
identifier[row] = identifier[fd] . identifier[readline] (). identifier[split] ()
identifier[fd] . identifier[seek] ( identifier[idx] )
identifier[n_col] = identifier[len] ( identifier[row] )
identifier[count] = identifier[n_row] * identifier[n_col]
identifier[val] = identifier[nm] . identifier[fromfile] ( identifier[fd] , identifier[sep] = literal[string] , identifier[count] = identifier[count] )
keyword[if] identifier[val] . identifier[shape] [ literal[int] ]< identifier[count] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[n_row] , identifier[n_col] ))
identifier[val] = identifier[nm] . identifier[asarray] ( identifier[val] , identifier[dtype] = identifier[dtype] )
identifier[val] . identifier[shape] =( identifier[n_row] , identifier[n_col] )
keyword[return] identifier[val]
|
def read_array(fd, n_row, n_col, dtype):
"""
Read a NumPy array of shape `(n_row, n_col)` from the given file
object and cast it to type `dtype`.
If `n_col` is None, determine the number of columns automatically.
"""
if n_col is None:
idx = fd.tell()
row = fd.readline().split()
fd.seek(idx)
n_col = len(row) # depends on [control=['if'], data=['n_col']]
count = n_row * n_col
val = nm.fromfile(fd, sep=' ', count=count)
if val.shape[0] < count:
raise ValueError('(%d, %d) array reading failed!' % (n_row, n_col)) # depends on [control=['if'], data=[]]
val = nm.asarray(val, dtype=dtype)
val.shape = (n_row, n_col)
return val
|
def add_path_part(url, regex=PATH_PART):
    """
    replace the variables in a url template with regex named groups

    :param url: string of a url template
    :param regex: regex of the named group
    :returns: regex
    """
    group_template = "(?P<{var_name}>{regex})"
    # string.Formatter.parse yields (literal_text, field_name, spec, conv)
    # for each chunk of the template.
    for literal_text, field_name, _, _ in string.Formatter().parse(url):
        if literal_text:
            yield literal_text
        if field_name:
            yield group_template.format(var_name=field_name, regex=regex)
|
def function[add_path_part, parameter[url, regex]]:
constant[
replace the variables in a url template with regex named groups
:param url: string of a url template
:param regex: regex of the named group
:returns: regex
]
variable[formatter] assign[=] call[name[string].Formatter, parameter[]]
variable[url_var_template] assign[=] constant[(?P<{var_name}>{regex})]
for taget[name[part]] in starred[call[name[formatter].parse, parameter[name[url]]]] begin[:]
<ast.Tuple object at 0x7da1b09491e0> assign[=] name[part]
if name[string_part] begin[:]
<ast.Yield object at 0x7da1b0948310>
if name[var_name] begin[:]
<ast.Yield object at 0x7da1b0949840>
|
keyword[def] identifier[add_path_part] ( identifier[url] , identifier[regex] = identifier[PATH_PART] ):
literal[string]
identifier[formatter] = identifier[string] . identifier[Formatter] ()
identifier[url_var_template] = literal[string]
keyword[for] identifier[part] keyword[in] identifier[formatter] . identifier[parse] ( identifier[url] ):
identifier[string_part] , identifier[var_name] , identifier[_] , identifier[_] = identifier[part]
keyword[if] identifier[string_part] :
keyword[yield] identifier[string_part]
keyword[if] identifier[var_name] :
keyword[yield] identifier[url_var_template] . identifier[format] ( identifier[var_name] = identifier[var_name] , identifier[regex] = identifier[regex] )
|
def add_path_part(url, regex=PATH_PART):
"""
replace the variables in a url template with regex named groups
:param url: string of a url template
:param regex: regex of the named group
:returns: regex
"""
formatter = string.Formatter()
url_var_template = '(?P<{var_name}>{regex})'
for part in formatter.parse(url):
(string_part, var_name, _, _) = part
if string_part:
yield string_part # depends on [control=['if'], data=[]]
if var_name:
yield url_var_template.format(var_name=var_name, regex=regex) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['part']]
|
def read(self, handle):
    '''Read binary data for this parameter from a file handle.

    This reads exactly enough data from the current position in the file to
    initialize the parameter: element size (1 signed byte), dimension count
    (1 unsigned byte), the dimension sizes, the raw payload
    (``self.total_bytes`` bytes), and finally a length-prefixed UTF-8
    description string.
    '''
    self.bytes_per_element, = struct.unpack('b', handle.read(1))
    dims, = struct.unpack('B', handle.read(1))
    # One unsigned byte per dimension size.
    self.dimensions = [struct.unpack('B', handle.read(1))[0] for _ in range(dims)]
    self.bytes = b''
    if self.total_bytes:
        self.bytes = handle.read(self.total_bytes)
    size, = struct.unpack('B', handle.read(1))
    # Conditional expression instead of the fragile `x and y or z` idiom
    # (equivalent here: an empty decode also yields '').
    self.desc = handle.read(size).decode('utf-8') if size else ''
|
def function[read, parameter[self, handle]]:
constant[Read binary data for this parameter from a file handle.
This reads exactly enough data from the current position in the file to
initialize the parameter.
]
<ast.Tuple object at 0x7da207f99a20> assign[=] call[name[struct].unpack, parameter[constant[b], call[name[handle].read, parameter[constant[1]]]]]
<ast.Tuple object at 0x7da207f9ac80> assign[=] call[name[struct].unpack, parameter[constant[B], call[name[handle].read, parameter[constant[1]]]]]
name[self].dimensions assign[=] <ast.ListComp object at 0x7da207f9a440>
name[self].bytes assign[=] constant[b'']
if name[self].total_bytes begin[:]
name[self].bytes assign[=] call[name[handle].read, parameter[name[self].total_bytes]]
<ast.Tuple object at 0x7da207f982b0> assign[=] call[name[struct].unpack, parameter[constant[B], call[name[handle].read, parameter[constant[1]]]]]
name[self].desc assign[=] <ast.BoolOp object at 0x7da207f98df0>
|
keyword[def] identifier[read] ( identifier[self] , identifier[handle] ):
literal[string]
identifier[self] . identifier[bytes_per_element] ,= identifier[struct] . identifier[unpack] ( literal[string] , identifier[handle] . identifier[read] ( literal[int] ))
identifier[dims] ,= identifier[struct] . identifier[unpack] ( literal[string] , identifier[handle] . identifier[read] ( literal[int] ))
identifier[self] . identifier[dimensions] =[ identifier[struct] . identifier[unpack] ( literal[string] , identifier[handle] . identifier[read] ( literal[int] ))[ literal[int] ] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[dims] )]
identifier[self] . identifier[bytes] = literal[string]
keyword[if] identifier[self] . identifier[total_bytes] :
identifier[self] . identifier[bytes] = identifier[handle] . identifier[read] ( identifier[self] . identifier[total_bytes] )
identifier[size] ,= identifier[struct] . identifier[unpack] ( literal[string] , identifier[handle] . identifier[read] ( literal[int] ))
identifier[self] . identifier[desc] = identifier[size] keyword[and] identifier[handle] . identifier[read] ( identifier[size] ). identifier[decode] ( literal[string] ) keyword[or] literal[string]
|
def read(self, handle):
"""Read binary data for this parameter from a file handle.
This reads exactly enough data from the current position in the file to
initialize the parameter.
"""
(self.bytes_per_element,) = struct.unpack('b', handle.read(1))
(dims,) = struct.unpack('B', handle.read(1))
self.dimensions = [struct.unpack('B', handle.read(1))[0] for _ in range(dims)]
self.bytes = b''
if self.total_bytes:
self.bytes = handle.read(self.total_bytes) # depends on [control=['if'], data=[]]
(size,) = struct.unpack('B', handle.read(1))
self.desc = size and handle.read(size).decode('utf-8') or ''
|
def rn(shape, dtype=None, impl='numpy', **kwargs):
    """Return a space of real tensors.

    Parameters
    ----------
    shape : positive int or sequence of positive ints
        Number of entries per axis for elements in this space. A
        single integer results in a space with 1 axis.
    dtype : optional
        Data type of each element. Can be provided in any way the
        `numpy.dtype` function understands, e.g. as built-in type or
        as a string. Only real floating-point data types are allowed.
        For ``None``, the `TensorSpace.default_dtype` of the
        created space is used in the form
        ``default_dtype(RealNumbers())``.
    impl : str, optional
        Impmlementation back-end for the space. See
        `odl.space.entry_points.tensor_space_impl_names` for available
        options.
    kwargs :
        Extra keyword arguments passed to the space constructor.

    Returns
    -------
    real_space : `TensorSpace`

    Examples
    --------
    Space of real 3-tuples with ``float32`` entries:

    >>> odl.rn(3, dtype='float32')
    rn(3, dtype='float32')

    Real 2x3 tensors with ``float32`` entries:

    >>> odl.rn((2, 3), dtype='float32')
    rn((2, 3), dtype='float32')

    The default data type depends on the implementation. For
    ``impl='numpy'``, it is ``'float64'``:

    >>> ts = odl.rn((2, 3))
    >>> ts
    rn((2, 3))
    >>> ts.dtype
    dtype('float64')

    See Also
    --------
    tensor_space : Space of tensors with arbitrary scalar data type.
    cn : Complex tensor space.
    """
    space_cls = tensor_space_impl(impl)
    if dtype is None:
        dtype = space_cls.default_dtype(RealNumbers())

    # Pass everything by keyword; backend constructors may take other
    # arguments by position.
    space = space_cls(shape=shape, dtype=dtype, **kwargs)
    if not space.is_real:
        raise ValueError(
            'data type {!r} not a real floating-point type.'.format(dtype))
    return space
|
def function[rn, parameter[shape, dtype, impl]]:
constant[Return a space of real tensors.
Parameters
----------
shape : positive int or sequence of positive ints
Number of entries per axis for elements in this space. A
single integer results in a space with 1 axis.
dtype : optional
Data type of each element. Can be provided in any way the
`numpy.dtype` function understands, e.g. as built-in type or
as a string. Only real floating-point data types are allowed.
For ``None``, the `TensorSpace.default_dtype` of the
created space is used in the form
``default_dtype(RealNumbers())``.
impl : str, optional
Impmlementation back-end for the space. See
`odl.space.entry_points.tensor_space_impl_names` for available
options.
kwargs :
Extra keyword arguments passed to the space constructor.
Returns
-------
real_space : `TensorSpace`
Examples
--------
Space of real 3-tuples with ``float32`` entries:
>>> odl.rn(3, dtype='float32')
rn(3, dtype='float32')
Real 2x3 tensors with ``float32`` entries:
>>> odl.rn((2, 3), dtype='float32')
rn((2, 3), dtype='float32')
The default data type depends on the implementation. For
``impl='numpy'``, it is ``'float64'``:
>>> ts = odl.rn((2, 3))
>>> ts
rn((2, 3))
>>> ts.dtype
dtype('float64')
See Also
--------
tensor_space : Space of tensors with arbitrary scalar data type.
cn : Complex tensor space.
]
variable[rn_cls] assign[=] call[name[tensor_space_impl], parameter[name[impl]]]
if compare[name[dtype] is constant[None]] begin[:]
variable[dtype] assign[=] call[name[rn_cls].default_dtype, parameter[call[name[RealNumbers], parameter[]]]]
variable[rn] assign[=] call[name[rn_cls], parameter[]]
if <ast.UnaryOp object at 0x7da1b206a710> begin[:]
<ast.Raise object at 0x7da1b2069de0>
return[name[rn]]
|
keyword[def] identifier[rn] ( identifier[shape] , identifier[dtype] = keyword[None] , identifier[impl] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[rn_cls] = identifier[tensor_space_impl] ( identifier[impl] )
keyword[if] identifier[dtype] keyword[is] keyword[None] :
identifier[dtype] = identifier[rn_cls] . identifier[default_dtype] ( identifier[RealNumbers] ())
identifier[rn] = identifier[rn_cls] ( identifier[shape] = identifier[shape] , identifier[dtype] = identifier[dtype] ,** identifier[kwargs] )
keyword[if] keyword[not] identifier[rn] . identifier[is_real] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[dtype] ))
keyword[return] identifier[rn]
|
def rn(shape, dtype=None, impl='numpy', **kwargs):
"""Return a space of real tensors.
Parameters
----------
shape : positive int or sequence of positive ints
Number of entries per axis for elements in this space. A
single integer results in a space with 1 axis.
dtype : optional
Data type of each element. Can be provided in any way the
`numpy.dtype` function understands, e.g. as built-in type or
as a string. Only real floating-point data types are allowed.
For ``None``, the `TensorSpace.default_dtype` of the
created space is used in the form
``default_dtype(RealNumbers())``.
impl : str, optional
Impmlementation back-end for the space. See
`odl.space.entry_points.tensor_space_impl_names` for available
options.
kwargs :
Extra keyword arguments passed to the space constructor.
Returns
-------
real_space : `TensorSpace`
Examples
--------
Space of real 3-tuples with ``float32`` entries:
>>> odl.rn(3, dtype='float32')
rn(3, dtype='float32')
Real 2x3 tensors with ``float32`` entries:
>>> odl.rn((2, 3), dtype='float32')
rn((2, 3), dtype='float32')
The default data type depends on the implementation. For
``impl='numpy'``, it is ``'float64'``:
>>> ts = odl.rn((2, 3))
>>> ts
rn((2, 3))
>>> ts.dtype
dtype('float64')
See Also
--------
tensor_space : Space of tensors with arbitrary scalar data type.
cn : Complex tensor space.
"""
rn_cls = tensor_space_impl(impl)
if dtype is None:
dtype = rn_cls.default_dtype(RealNumbers()) # depends on [control=['if'], data=['dtype']]
# Use args by keyword since the constructor may take other arguments
# by position
rn = rn_cls(shape=shape, dtype=dtype, **kwargs)
if not rn.is_real:
raise ValueError('data type {!r} not a real floating-point type.'.format(dtype)) # depends on [control=['if'], data=[]]
return rn
|
def ValidateEmail(email, column_name=None, problems=None):
    """
    checks the basic validity of email:
    - an empty email is considered valid and no error or warning is issued.
    - should start with any string not including @
    - then should match a single @
    - then matches any string not including @
    - then contains a single dot
    - then again matches any string after dot.
    """
    is_valid = IsEmpty(email) or re.match(r'[^@]+@[^@]+\.[^@]+', email)
    if is_valid:
        return True
    # Report the bad value if a problem collector was supplied.
    if problems:
        problems.InvalidValue(column_name, email)
    return False
|
def function[ValidateEmail, parameter[email, column_name, problems]]:
constant[
checks the basic validity of email:
- an empty email is considered valid and no error or warning is issued.
- should start with any string not including @
- then should match a single @
- then matches any string not including @
- then contains a single dot
- then again matches any string after dot.
]
if <ast.BoolOp object at 0x7da1b17b5360> begin[:]
return[constant[True]]
|
keyword[def] identifier[ValidateEmail] ( identifier[email] , identifier[column_name] = keyword[None] , identifier[problems] = keyword[None] ):
literal[string]
keyword[if] identifier[IsEmpty] ( identifier[email] ) keyword[or] identifier[re] . identifier[match] ( literal[string] , identifier[email] ):
keyword[return] keyword[True]
keyword[else] :
keyword[if] identifier[problems] :
identifier[problems] . identifier[InvalidValue] ( identifier[column_name] , identifier[email] )
keyword[return] keyword[False]
|
def ValidateEmail(email, column_name=None, problems=None):
"""
checks the basic validity of email:
- an empty email is considered valid and no error or warning is issued.
- should start with any string not including @
- then should match a single @
- then matches any string not including @
- then contains a single dot
- then again matches any string after dot.
"""
if IsEmpty(email) or re.match('[^@]+@[^@]+\\.[^@]+', email):
return True # depends on [control=['if'], data=[]]
else:
if problems:
problems.InvalidValue(column_name, email) # depends on [control=['if'], data=[]]
return False
|
def create_with_virtualenv(self, interpreter, virtualenv_options):
    """Create a virtualenv using the virtualenv lib."""
    cmd = ['virtualenv', '--python', interpreter, self.env_path, *virtualenv_options]
    if not self.pip_installed:
        # Slot the flag right after the interpreter argument.
        cmd.insert(3, '--no-pip')
    try:
        helpers.logged_exec(cmd)
        self.env_bin_path = os.path.join(self.env_path, 'bin')
    except FileNotFoundError as error:
        logger.error(
            'Virtualenv is not installed. It is needed to create a virtualenv with '
            'a different python version than fades (got {})'.format(error))
        raise FadesError('virtualenv not found')
    except helpers.ExecutionError as error:
        error.dump_to_log(logger)
        raise FadesError('virtualenv could not be run')
    except Exception as error:
        logger.exception("Error creating virtualenv: %s", error)
        raise FadesError('General error while running virtualenv')
|
def function[create_with_virtualenv, parameter[self, interpreter, virtualenv_options]]:
constant[Create a virtualenv using the virtualenv lib.]
variable[args] assign[=] list[[<ast.Constant object at 0x7da1b0f59240>, <ast.Constant object at 0x7da1b0f59c30>, <ast.Name object at 0x7da1b0f5bd30>, <ast.Attribute object at 0x7da1b0f58970>]]
call[name[args].extend, parameter[name[virtualenv_options]]]
if <ast.UnaryOp object at 0x7da1b0f58c70> begin[:]
call[name[args].insert, parameter[constant[3], constant[--no-pip]]]
<ast.Try object at 0x7da1b0f59a50>
|
keyword[def] identifier[create_with_virtualenv] ( identifier[self] , identifier[interpreter] , identifier[virtualenv_options] ):
literal[string]
identifier[args] =[ literal[string] , literal[string] , identifier[interpreter] , identifier[self] . identifier[env_path] ]
identifier[args] . identifier[extend] ( identifier[virtualenv_options] )
keyword[if] keyword[not] identifier[self] . identifier[pip_installed] :
identifier[args] . identifier[insert] ( literal[int] , literal[string] )
keyword[try] :
identifier[helpers] . identifier[logged_exec] ( identifier[args] )
identifier[self] . identifier[env_bin_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[env_path] , literal[string] )
keyword[except] identifier[FileNotFoundError] keyword[as] identifier[error] :
identifier[logger] . identifier[error] ( literal[string]
literal[string] . identifier[format] ( identifier[error] ))
keyword[raise] identifier[FadesError] ( literal[string] )
keyword[except] identifier[helpers] . identifier[ExecutionError] keyword[as] identifier[error] :
identifier[error] . identifier[dump_to_log] ( identifier[logger] )
keyword[raise] identifier[FadesError] ( literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[error] :
identifier[logger] . identifier[exception] ( literal[string] , identifier[error] )
keyword[raise] identifier[FadesError] ( literal[string] )
|
def create_with_virtualenv(self, interpreter, virtualenv_options):
"""Create a virtualenv using the virtualenv lib."""
args = ['virtualenv', '--python', interpreter, self.env_path]
args.extend(virtualenv_options)
if not self.pip_installed:
args.insert(3, '--no-pip') # depends on [control=['if'], data=[]]
try:
helpers.logged_exec(args)
self.env_bin_path = os.path.join(self.env_path, 'bin') # depends on [control=['try'], data=[]]
except FileNotFoundError as error:
logger.error('Virtualenv is not installed. It is needed to create a virtualenv with a different python version than fades (got {})'.format(error))
raise FadesError('virtualenv not found') # depends on [control=['except'], data=['error']]
except helpers.ExecutionError as error:
error.dump_to_log(logger)
raise FadesError('virtualenv could not be run') # depends on [control=['except'], data=['error']]
except Exception as error:
logger.exception('Error creating virtualenv: %s', error)
raise FadesError('General error while running virtualenv') # depends on [control=['except'], data=['error']]
|
def run(self):
    """Run the minimization.

    Returns
    -------
    K : (N,N) ndarray
        the optimal rate matrix
    """
    if self.verbose:
        self.selftest()
    self.count = 0
    if self.verbose:
        logging.info('initial value of the objective function is %f'
                     % self.function(self.initial))

    # Bounded quasi-Newton (L-BFGS-B) minimization of the objective.
    opt_theta, fval, info = fmin_l_bfgs_b(
        self.function_and_gradient, self.initial, fprime=None, args=(),
        approx_grad=False, bounds=self.bounds, factr=self.tol,
        pgtol=1.0E-11, disp=0, maxiter=self.maxiter, maxfun=self.maxiter,
        maxls=100)
    if self.verbose:
        logging.info('l_bfgs_b says: ' + str(info))
        logging.info('objective function value reached: %f' % fval)
    if info['warnflag'] != 0:
        # Non-zero warnflag means the optimizer did not converge cleanly.
        raise_or_warn(str(info), on_error=self.on_error,
                      warning=NotConvergedWarning,
                      exception=NotConvergedError)

    # Assemble the rate matrix from the optimal off-diagonal parameters;
    # the diagonal is set so that every row sums to zero.
    K = np.zeros((self.N, self.N))
    K[self.I, self.J] = opt_theta / self.pi[self.I]
    K[self.J, self.I] = opt_theta / self.pi[self.J]
    np.fill_diagonal(K, -K.sum(axis=1))
    self.K = K
    return K
|
def function[run, parameter[self]]:
constant[Run the minimization.
Returns
-------
K : (N,N) ndarray
the optimal rate matrix
]
if name[self].verbose begin[:]
call[name[self].selftest, parameter[]]
name[self].count assign[=] constant[0]
if name[self].verbose begin[:]
call[name[logging].info, parameter[binary_operation[constant[initial value of the objective function is %f] <ast.Mod object at 0x7da2590d6920> call[name[self].function, parameter[name[self].initial]]]]]
variable[theta0] assign[=] name[self].initial
<ast.Tuple object at 0x7da1b255c130> assign[=] call[name[fmin_l_bfgs_b], parameter[name[self].function_and_gradient, name[theta0]]]
if name[self].verbose begin[:]
call[name[logging].info, parameter[binary_operation[constant[l_bfgs_b says: ] + call[name[str], parameter[name[d]]]]]]
call[name[logging].info, parameter[binary_operation[constant[objective function value reached: %f] <ast.Mod object at 0x7da2590d6920> name[f]]]]
if compare[call[name[d]][constant[warnflag]] not_equal[!=] constant[0]] begin[:]
call[name[raise_or_warn], parameter[call[name[str], parameter[name[d]]]]]
variable[K] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Attribute object at 0x7da1b255dcf0>, <ast.Attribute object at 0x7da1b255e860>]]]]
call[name[K]][tuple[[<ast.Attribute object at 0x7da1b264a3b0>, <ast.Attribute object at 0x7da1b264bcd0>]]] assign[=] binary_operation[name[theta] / call[name[self].pi][name[self].I]]
call[name[K]][tuple[[<ast.Attribute object at 0x7da1b264ad10>, <ast.Attribute object at 0x7da1b26490f0>]]] assign[=] binary_operation[name[theta] / call[name[self].pi][name[self].J]]
call[name[np].fill_diagonal, parameter[name[K], <ast.UnaryOp object at 0x7da1b264a9e0>]]
name[self].K assign[=] name[K]
return[name[K]]
|
keyword[def] identifier[run] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[verbose] :
identifier[self] . identifier[selftest] ()
identifier[self] . identifier[count] = literal[int]
keyword[if] identifier[self] . identifier[verbose] :
identifier[logging] . identifier[info] ( literal[string]
% identifier[self] . identifier[function] ( identifier[self] . identifier[initial] ))
identifier[theta0] = identifier[self] . identifier[initial]
identifier[theta] , identifier[f] , identifier[d] = identifier[fmin_l_bfgs_b] ( identifier[self] . identifier[function_and_gradient] , identifier[theta0] , identifier[fprime] = keyword[None] , identifier[args] =(),
identifier[approx_grad] = keyword[False] , identifier[bounds] = identifier[self] . identifier[bounds] , identifier[factr] = identifier[self] . identifier[tol] ,
identifier[pgtol] = literal[int] , identifier[disp] = literal[int] , identifier[maxiter] = identifier[self] . identifier[maxiter] , identifier[maxfun] = identifier[self] . identifier[maxiter] , identifier[maxls] = literal[int] )
keyword[if] identifier[self] . identifier[verbose] :
identifier[logging] . identifier[info] ( literal[string] + identifier[str] ( identifier[d] ))
identifier[logging] . identifier[info] ( literal[string] % identifier[f] )
keyword[if] identifier[d] [ literal[string] ]!= literal[int] :
identifier[raise_or_warn] ( identifier[str] ( identifier[d] ), identifier[on_error] = identifier[self] . identifier[on_error] , identifier[warning] = identifier[NotConvergedWarning] , identifier[exception] = identifier[NotConvergedError] )
identifier[K] = identifier[np] . identifier[zeros] (( identifier[self] . identifier[N] , identifier[self] . identifier[N] ))
identifier[K] [ identifier[self] . identifier[I] , identifier[self] . identifier[J] ]= identifier[theta] / identifier[self] . identifier[pi] [ identifier[self] . identifier[I] ]
identifier[K] [ identifier[self] . identifier[J] , identifier[self] . identifier[I] ]= identifier[theta] / identifier[self] . identifier[pi] [ identifier[self] . identifier[J] ]
identifier[np] . identifier[fill_diagonal] ( identifier[K] ,- identifier[np] . identifier[sum] ( identifier[K] , identifier[axis] = literal[int] ))
identifier[self] . identifier[K] = identifier[K]
keyword[return] identifier[K]
|
def run(self):
"""Run the minimization.
Returns
-------
K : (N,N) ndarray
the optimal rate matrix
"""
if self.verbose:
self.selftest() # depends on [control=['if'], data=[]]
self.count = 0
if self.verbose:
logging.info('initial value of the objective function is %f' % self.function(self.initial)) # depends on [control=['if'], data=[]]
theta0 = self.initial
(theta, f, d) = fmin_l_bfgs_b(self.function_and_gradient, theta0, fprime=None, args=(), approx_grad=False, bounds=self.bounds, factr=self.tol, pgtol=1e-11, disp=0, maxiter=self.maxiter, maxfun=self.maxiter, maxls=100)
if self.verbose:
logging.info('l_bfgs_b says: ' + str(d))
logging.info('objective function value reached: %f' % f) # depends on [control=['if'], data=[]]
if d['warnflag'] != 0:
raise_or_warn(str(d), on_error=self.on_error, warning=NotConvergedWarning, exception=NotConvergedError) # depends on [control=['if'], data=[]]
K = np.zeros((self.N, self.N))
K[self.I, self.J] = theta / self.pi[self.I]
K[self.J, self.I] = theta / self.pi[self.J]
np.fill_diagonal(K, -np.sum(K, axis=1))
self.K = K
return K
|
def _proxy(self):
    """
    Generate an instance context for the instance, the context is capable of
    performing various actions. All instance actions are proxied to the context

    :returns: CertificateContext for this CertificateInstance
    :rtype: twilio.rest.preview.deployed_devices.fleet.certificate.CertificateContext
    """
    context = self._context
    if context is None:
        # Build the context lazily on first access and cache it.
        context = CertificateContext(
            self._version,
            fleet_sid=self._solution['fleet_sid'],
            sid=self._solution['sid'],
        )
        self._context = context
    return context
|
def function[_proxy, parameter[self]]:
constant[
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: CertificateContext for this CertificateInstance
:rtype: twilio.rest.preview.deployed_devices.fleet.certificate.CertificateContext
]
if compare[name[self]._context is constant[None]] begin[:]
name[self]._context assign[=] call[name[CertificateContext], parameter[name[self]._version]]
return[name[self]._context]
|
keyword[def] identifier[_proxy] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_context] keyword[is] keyword[None] :
identifier[self] . identifier[_context] = identifier[CertificateContext] (
identifier[self] . identifier[_version] ,
identifier[fleet_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[sid] = identifier[self] . identifier[_solution] [ literal[string] ],
)
keyword[return] identifier[self] . identifier[_context]
|
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: CertificateContext for this CertificateInstance
:rtype: twilio.rest.preview.deployed_devices.fleet.certificate.CertificateContext
"""
if self._context is None:
self._context = CertificateContext(self._version, fleet_sid=self._solution['fleet_sid'], sid=self._solution['sid']) # depends on [control=['if'], data=[]]
return self._context
|
def get(self, sid):
    """
    Constructs a IpAddressContext

    :param sid: A string that identifies the IpAddress resource to fetch

    :returns: twilio.rest.api.v2010.account.sip.ip_access_control_list.ip_address.IpAddressContext
    :rtype: twilio.rest.api.v2010.account.sip.ip_access_control_list.ip_address.IpAddressContext
    """
    solution = self._solution
    return IpAddressContext(
        self._version,
        account_sid=solution['account_sid'],
        ip_access_control_list_sid=solution['ip_access_control_list_sid'],
        sid=sid,
    )
|
def function[get, parameter[self, sid]]:
constant[
Constructs a IpAddressContext
:param sid: A string that identifies the IpAddress resource to fetch
:returns: twilio.rest.api.v2010.account.sip.ip_access_control_list.ip_address.IpAddressContext
:rtype: twilio.rest.api.v2010.account.sip.ip_access_control_list.ip_address.IpAddressContext
]
return[call[name[IpAddressContext], parameter[name[self]._version]]]
|
keyword[def] identifier[get] ( identifier[self] , identifier[sid] ):
literal[string]
keyword[return] identifier[IpAddressContext] (
identifier[self] . identifier[_version] ,
identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[ip_access_control_list_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[sid] = identifier[sid] ,
)
|
def get(self, sid):
"""
Constructs a IpAddressContext
:param sid: A string that identifies the IpAddress resource to fetch
:returns: twilio.rest.api.v2010.account.sip.ip_access_control_list.ip_address.IpAddressContext
:rtype: twilio.rest.api.v2010.account.sip.ip_access_control_list.ip_address.IpAddressContext
"""
return IpAddressContext(self._version, account_sid=self._solution['account_sid'], ip_access_control_list_sid=self._solution['ip_access_control_list_sid'], sid=sid)
|
def add_generic_error_message_with_code(request, error_code):
    """
    Add message to request indicating that there was an issue processing request.

    Arguments:
        request: The current request.
        error_code: A string error code to be used to point devs to the spot in
            the code where this error occurred.
    """
    # Translatable template; the markup placeholders are substituted after
    # translation so translators never see raw HTML tags.
    message_template = _(
        '{strong_start}Something happened.{strong_end} '
        '{span_start}Please reach out to your learning administrator with '
        'the following error code and they will be able to help you out.{span_end}'
        '{span_start}Error code: {error_code}{span_end}'
    )
    messages.error(
        request,
        message_template.format(
            strong_start='<strong>',
            strong_end='</strong>',
            span_start='<span>',
            span_end='</span>',
            error_code=error_code,
        )
    )
|
def function[add_generic_error_message_with_code, parameter[request, error_code]]:
constant[
Add message to request indicating that there was an issue processing request.
Arguments:
request: The current request.
error_code: A string error code to be used to point devs to the spot in
the code where this error occurred.
]
call[name[messages].error, parameter[name[request], call[call[name[_], parameter[constant[{strong_start}Something happened.{strong_end} {span_start}Please reach out to your learning administrator with the following error code and they will be able to help you out.{span_end}{span_start}Error code: {error_code}{span_end}]]].format, parameter[]]]]
|
keyword[def] identifier[add_generic_error_message_with_code] ( identifier[request] , identifier[error_code] ):
literal[string]
identifier[messages] . identifier[error] (
identifier[request] ,
identifier[_] (
literal[string]
literal[string]
literal[string]
literal[string]
). identifier[format] (
identifier[error_code] = identifier[error_code] ,
identifier[strong_start] = literal[string] ,
identifier[strong_end] = literal[string] ,
identifier[span_start] = literal[string] ,
identifier[span_end] = literal[string] ,
)
)
|
def add_generic_error_message_with_code(request, error_code):
"""
Add message to request indicating that there was an issue processing request.
Arguments:
request: The current request.
error_code: A string error code to be used to point devs to the spot in
the code where this error occurred.
"""
messages.error(request, _('{strong_start}Something happened.{strong_end} {span_start}Please reach out to your learning administrator with the following error code and they will be able to help you out.{span_end}{span_start}Error code: {error_code}{span_end}').format(error_code=error_code, strong_start='<strong>', strong_end='</strong>', span_start='<span>', span_end='</span>'))
|
def handle_error(self, error, req, schema, error_status_code, error_headers):
    """Handles errors during parsing. Aborts the current HTTP request and
    responds with a 422 error.
    """
    # Any falsy caller-supplied status falls back to the parser default
    # (normally 422 Unprocessable Entity).
    status = error_status_code or self.DEFAULT_VALIDATION_STATUS
    abort(status, exc=error, messages=error.messages, schema=schema,
          headers=error_headers)
|
def function[handle_error, parameter[self, error, req, schema, error_status_code, error_headers]]:
constant[Handles errors during parsing. Aborts the current HTTP request and
responds with a 422 error.
]
variable[status_code] assign[=] <ast.BoolOp object at 0x7da1b22cf1c0>
call[name[abort], parameter[name[status_code]]]
|
keyword[def] identifier[handle_error] ( identifier[self] , identifier[error] , identifier[req] , identifier[schema] , identifier[error_status_code] , identifier[error_headers] ):
literal[string]
identifier[status_code] = identifier[error_status_code] keyword[or] identifier[self] . identifier[DEFAULT_VALIDATION_STATUS]
identifier[abort] (
identifier[status_code] ,
identifier[exc] = identifier[error] ,
identifier[messages] = identifier[error] . identifier[messages] ,
identifier[schema] = identifier[schema] ,
identifier[headers] = identifier[error_headers] ,
)
|
def handle_error(self, error, req, schema, error_status_code, error_headers):
"""Handles errors during parsing. Aborts the current HTTP request and
responds with a 422 error.
"""
status_code = error_status_code or self.DEFAULT_VALIDATION_STATUS
abort(status_code, exc=error, messages=error.messages, schema=schema, headers=error_headers)
|
def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
        """
        Read the data encoding the UsernamePasswordCredential struct and
        decode it into its constituent parts.
        Args:
            input_stream (stream): A data stream containing encoded object
                data, supporting a read method; usually a BytearrayStream
                object.
            kmip_version (KMIPVersion): An enumeration defining the KMIP
                version with which the object will be decoded. Optional,
                defaults to KMIP 1.0.
        Raises:
            ValueError: Raised if the username is missing from the encoding.
        """
        # Parse the outer struct header first; this populates self.length.
        super(UsernamePasswordCredential, self).read(
            input_stream,
            kmip_version=kmip_version
        )
        # Restrict all further reads to exactly this struct's payload bytes.
        local_stream = BytearrayStream(input_stream.read(self.length))
        # The username field is mandatory in this encoding.
        if self.is_tag_next(enums.Tags.USERNAME, local_stream):
            self._username = primitives.TextString(
                tag=enums.Tags.USERNAME
            )
            self._username.read(local_stream, kmip_version=kmip_version)
        else:
            raise ValueError(
                "Username/password credential encoding missing the username."
            )
        # The password field is optional; when absent, self._password is
        # left as-is.
        if self.is_tag_next(enums.Tags.PASSWORD, local_stream):
            self._password = primitives.TextString(
                tag=enums.Tags.PASSWORD
            )
            self._password.read(local_stream, kmip_version=kmip_version)
        # Error out if unparsed bytes remain in the payload.
        self.is_oversized(local_stream)
|
def function[read, parameter[self, input_stream, kmip_version]]:
constant[
Read the data encoding the UsernamePasswordCredential struct and
decode it into its constituent parts.
Args:
input_stream (stream): A data stream containing encoded object
data, supporting a read method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be decoded. Optional,
defaults to KMIP 1.0.
Raises:
ValueError: Raised if the username is missing from the encoding.
]
call[call[name[super], parameter[name[UsernamePasswordCredential], name[self]]].read, parameter[name[input_stream]]]
variable[local_stream] assign[=] call[name[BytearrayStream], parameter[call[name[input_stream].read, parameter[name[self].length]]]]
if call[name[self].is_tag_next, parameter[name[enums].Tags.USERNAME, name[local_stream]]] begin[:]
name[self]._username assign[=] call[name[primitives].TextString, parameter[]]
call[name[self]._username.read, parameter[name[local_stream]]]
if call[name[self].is_tag_next, parameter[name[enums].Tags.PASSWORD, name[local_stream]]] begin[:]
name[self]._password assign[=] call[name[primitives].TextString, parameter[]]
call[name[self]._password.read, parameter[name[local_stream]]]
call[name[self].is_oversized, parameter[name[local_stream]]]
|
keyword[def] identifier[read] ( identifier[self] , identifier[input_stream] , identifier[kmip_version] = identifier[enums] . identifier[KMIPVersion] . identifier[KMIP_1_0] ):
literal[string]
identifier[super] ( identifier[UsernamePasswordCredential] , identifier[self] ). identifier[read] (
identifier[input_stream] ,
identifier[kmip_version] = identifier[kmip_version]
)
identifier[local_stream] = identifier[BytearrayStream] ( identifier[input_stream] . identifier[read] ( identifier[self] . identifier[length] ))
keyword[if] identifier[self] . identifier[is_tag_next] ( identifier[enums] . identifier[Tags] . identifier[USERNAME] , identifier[local_stream] ):
identifier[self] . identifier[_username] = identifier[primitives] . identifier[TextString] (
identifier[tag] = identifier[enums] . identifier[Tags] . identifier[USERNAME]
)
identifier[self] . identifier[_username] . identifier[read] ( identifier[local_stream] , identifier[kmip_version] = identifier[kmip_version] )
keyword[else] :
keyword[raise] identifier[ValueError] (
literal[string]
)
keyword[if] identifier[self] . identifier[is_tag_next] ( identifier[enums] . identifier[Tags] . identifier[PASSWORD] , identifier[local_stream] ):
identifier[self] . identifier[_password] = identifier[primitives] . identifier[TextString] (
identifier[tag] = identifier[enums] . identifier[Tags] . identifier[PASSWORD]
)
identifier[self] . identifier[_password] . identifier[read] ( identifier[local_stream] , identifier[kmip_version] = identifier[kmip_version] )
identifier[self] . identifier[is_oversized] ( identifier[local_stream] )
|
def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Read the data encoding the UsernamePasswordCredential struct and
decode it into its constituent parts.
Args:
input_stream (stream): A data stream containing encoded object
data, supporting a read method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be decoded. Optional,
defaults to KMIP 1.0.
Raises:
ValueError: Raised if the username is missing from the encoding.
"""
super(UsernamePasswordCredential, self).read(input_stream, kmip_version=kmip_version)
local_stream = BytearrayStream(input_stream.read(self.length))
if self.is_tag_next(enums.Tags.USERNAME, local_stream):
self._username = primitives.TextString(tag=enums.Tags.USERNAME)
self._username.read(local_stream, kmip_version=kmip_version) # depends on [control=['if'], data=[]]
else:
raise ValueError('Username/password credential encoding missing the username.')
if self.is_tag_next(enums.Tags.PASSWORD, local_stream):
self._password = primitives.TextString(tag=enums.Tags.PASSWORD)
self._password.read(local_stream, kmip_version=kmip_version) # depends on [control=['if'], data=[]]
self.is_oversized(local_stream)
|
def checkout(self):
        '''
        Checkout the configured branch/tag. We catch an "Exception" class here
        instead of a specific exception class because the exceptions raised by
        GitPython when running these functions vary in different versions of
        GitPython.
        '''
        tgt_ref = self.get_checkout_target()
        try:
            head_sha = self.repo.rev_parse('HEAD').hexsha
        except Exception:
            # Should only happen the first time we are checking out, since
            # we fetch first before ever checking anything out.
            head_sha = None
        # Try branch-head and tag candidates in order.
        # 'origin/' + tgt_ref ==> matches a branch head
        # 'tags/' + tgt_ref + '@{commit}' ==> matches tag's commit
        for rev_parse_target, checkout_ref in (
                ('origin/' + tgt_ref, 'origin/' + tgt_ref),
                ('tags/' + tgt_ref, 'tags/' + tgt_ref)):
            try:
                target_sha = self.repo.rev_parse(rev_parse_target).hexsha
            except Exception:
                # ref does not exist
                continue
            else:
                if head_sha == target_sha:
                    # No need to checkout, we're already up-to-date
                    return self.check_root()
            # Serialize checkouts across processes via a lock file.
            try:
                with self.gen_lock(lock_type='checkout'):
                    self.repo.git.checkout(checkout_ref)
                    log.debug(
                        '%s remote \'%s\' has been checked out to %s',
                        self.role,
                        self.id,
                        checkout_ref
                    )
            except GitLockError as exc:
                if exc.errno == errno.EEXIST:
                    # Re-raise with a different strerror containing a
                    # more meaningful error message for the calling
                    # function.
                    raise GitLockError(
                        exc.errno,
                        'Checkout lock exists for {0} remote \'{1}\''
                        .format(self.role, self.id)
                    )
                else:
                    log.error(
                        'Error %d encountered obtaining checkout lock '
                        'for %s remote \'%s\'',
                        exc.errno,
                        self.role,
                        self.id
                    )
                    return None
            except Exception:
                # Checkout itself failed; fall through to the next candidate.
                continue
            # Checkout succeeded for this candidate ref.
            return self.check_root()
        log.error(
            'Failed to checkout %s from %s remote \'%s\': remote ref does '
            'not exist', tgt_ref, self.role, self.id
        )
        return None
|
def function[checkout, parameter[self]]:
constant[
Checkout the configured branch/tag. We catch an "Exception" class here
instead of a specific exception class because the exceptions raised by
GitPython when running these functions vary in different versions of
GitPython.
]
variable[tgt_ref] assign[=] call[name[self].get_checkout_target, parameter[]]
<ast.Try object at 0x7da1b2346b30>
for taget[tuple[[<ast.Name object at 0x7da1b2162b90>, <ast.Name object at 0x7da1b2163bb0>]]] in starred[tuple[[<ast.Tuple object at 0x7da1b2163580>, <ast.Tuple object at 0x7da1b2162d10>]]] begin[:]
<ast.Try object at 0x7da1b2162a10>
<ast.Try object at 0x7da2047ea470>
return[call[name[self].check_root, parameter[]]]
call[name[log].error, parameter[constant[Failed to checkout %s from %s remote '%s': remote ref does not exist], name[tgt_ref], name[self].role, name[self].id]]
return[constant[None]]
|
keyword[def] identifier[checkout] ( identifier[self] ):
literal[string]
identifier[tgt_ref] = identifier[self] . identifier[get_checkout_target] ()
keyword[try] :
identifier[head_sha] = identifier[self] . identifier[repo] . identifier[rev_parse] ( literal[string] ). identifier[hexsha]
keyword[except] identifier[Exception] :
identifier[head_sha] = keyword[None]
keyword[for] identifier[rev_parse_target] , identifier[checkout_ref] keyword[in] (
( literal[string] + identifier[tgt_ref] , literal[string] + identifier[tgt_ref] ),
( literal[string] + identifier[tgt_ref] , literal[string] + identifier[tgt_ref] )):
keyword[try] :
identifier[target_sha] = identifier[self] . identifier[repo] . identifier[rev_parse] ( identifier[rev_parse_target] ). identifier[hexsha]
keyword[except] identifier[Exception] :
keyword[continue]
keyword[else] :
keyword[if] identifier[head_sha] == identifier[target_sha] :
keyword[return] identifier[self] . identifier[check_root] ()
keyword[try] :
keyword[with] identifier[self] . identifier[gen_lock] ( identifier[lock_type] = literal[string] ):
identifier[self] . identifier[repo] . identifier[git] . identifier[checkout] ( identifier[checkout_ref] )
identifier[log] . identifier[debug] (
literal[string] ,
identifier[self] . identifier[role] ,
identifier[self] . identifier[id] ,
identifier[checkout_ref]
)
keyword[except] identifier[GitLockError] keyword[as] identifier[exc] :
keyword[if] identifier[exc] . identifier[errno] == identifier[errno] . identifier[EEXIST] :
keyword[raise] identifier[GitLockError] (
identifier[exc] . identifier[errno] ,
literal[string]
. identifier[format] ( identifier[self] . identifier[role] , identifier[self] . identifier[id] )
)
keyword[else] :
identifier[log] . identifier[error] (
literal[string]
literal[string] ,
identifier[exc] . identifier[errno] ,
identifier[self] . identifier[role] ,
identifier[self] . identifier[id]
)
keyword[return] keyword[None]
keyword[except] identifier[Exception] :
keyword[continue]
keyword[return] identifier[self] . identifier[check_root] ()
identifier[log] . identifier[error] (
literal[string]
literal[string] , identifier[tgt_ref] , identifier[self] . identifier[role] , identifier[self] . identifier[id]
)
keyword[return] keyword[None]
|
def checkout(self):
"""
Checkout the configured branch/tag. We catch an "Exception" class here
instead of a specific exception class because the exceptions raised by
GitPython when running these functions vary in different versions of
GitPython.
"""
tgt_ref = self.get_checkout_target()
try:
head_sha = self.repo.rev_parse('HEAD').hexsha # depends on [control=['try'], data=[]]
except Exception:
# Should only happen the first time we are checking out, since
# we fetch first before ever checking anything out.
head_sha = None # depends on [control=['except'], data=[]]
# 'origin/' + tgt_ref ==> matches a branch head
# 'tags/' + tgt_ref + '@{commit}' ==> matches tag's commit
for (rev_parse_target, checkout_ref) in (('origin/' + tgt_ref, 'origin/' + tgt_ref), ('tags/' + tgt_ref, 'tags/' + tgt_ref)):
try:
target_sha = self.repo.rev_parse(rev_parse_target).hexsha # depends on [control=['try'], data=[]]
except Exception:
# ref does not exist
continue # depends on [control=['except'], data=[]]
else:
if head_sha == target_sha:
# No need to checkout, we're already up-to-date
return self.check_root() # depends on [control=['if'], data=[]]
try:
with self.gen_lock(lock_type='checkout'):
self.repo.git.checkout(checkout_ref)
log.debug("%s remote '%s' has been checked out to %s", self.role, self.id, checkout_ref) # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
except GitLockError as exc:
if exc.errno == errno.EEXIST:
# Re-raise with a different strerror containing a
# more meaningful error message for the calling
# function.
raise GitLockError(exc.errno, "Checkout lock exists for {0} remote '{1}'".format(self.role, self.id)) # depends on [control=['if'], data=[]]
else:
log.error("Error %d encountered obtaining checkout lock for %s remote '%s'", exc.errno, self.role, self.id)
return None # depends on [control=['except'], data=['exc']]
except Exception:
continue # depends on [control=['except'], data=[]]
return self.check_root() # depends on [control=['for'], data=[]]
log.error("Failed to checkout %s from %s remote '%s': remote ref does not exist", tgt_ref, self.role, self.id)
return None
|
def _get_color_from_config(config, option):
    """
    Helper method to get an option from the COLOR_SECTION of the config.

    Returns None when the option is not present. When it is present, the
    value is parsed as a raw string literal via ast.literal_eval, which
    allows escape sequences in the egrc.
    """
    if config.has_option(COLOR_SECTION, option):
        return ast.literal_eval(config.get(COLOR_SECTION, option))
    return None
|
def function[_get_color_from_config, parameter[config, option]]:
constant[
Helper method to uet an option from the COLOR_SECTION of the config.
Returns None if the value is not present. If the value is present, it tries
to parse the value as a raw string literal, allowing escape sequences in
the egrc.
]
if <ast.UnaryOp object at 0x7da20c6c6410> begin[:]
return[constant[None]]
|
keyword[def] identifier[_get_color_from_config] ( identifier[config] , identifier[option] ):
literal[string]
keyword[if] keyword[not] identifier[config] . identifier[has_option] ( identifier[COLOR_SECTION] , identifier[option] ):
keyword[return] keyword[None]
keyword[else] :
keyword[return] identifier[ast] . identifier[literal_eval] ( identifier[config] . identifier[get] ( identifier[COLOR_SECTION] , identifier[option] ))
|
def _get_color_from_config(config, option):
"""
Helper method to uet an option from the COLOR_SECTION of the config.
Returns None if the value is not present. If the value is present, it tries
to parse the value as a raw string literal, allowing escape sequences in
the egrc.
"""
if not config.has_option(COLOR_SECTION, option):
return None # depends on [control=['if'], data=[]]
else:
return ast.literal_eval(config.get(COLOR_SECTION, option))
|
def yoffset(self, value):
        """gets/sets the yoffset"""
        # Update only when the value actually differs and is numeric.
        # NOTE(review): `long` exists only on Python 2 — confirm this module
        # targets Python 2 before running under Python 3.
        changed = self._yoffset != value
        if changed and isinstance(value, (int, float, long)):
            self._yoffset = value
|
def function[yoffset, parameter[self, value]]:
constant[gets/sets the yoffset]
if <ast.BoolOp object at 0x7da1b11a8fa0> begin[:]
name[self]._yoffset assign[=] name[value]
|
keyword[def] identifier[yoffset] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[self] . identifier[_yoffset] != identifier[value] keyword[and] identifier[isinstance] ( identifier[value] ,( identifier[int] , identifier[float] , identifier[long] )):
identifier[self] . identifier[_yoffset] = identifier[value]
|
def yoffset(self, value):
"""gets/sets the yoffset"""
if self._yoffset != value and isinstance(value, (int, float, long)):
self._yoffset = value # depends on [control=['if'], data=[]]
|
def _initialize_upload(self):
        """
        Initialize the upload on the API server by submitting the file name,
        file size, the part size used for multipart upload, and the upload
        destination (project or parent folder).
        """
        payload = {
            'name': self._file_name,
            'part_size': self._part_size,
            'size': self._file_size,
        }
        # A project destination takes precedence over a parent folder.
        if self._project:
            payload['project'] = self._project
        elif self._parent:
            payload['parent'] = self._parent
        params = {'overwrite': self._overwrite} if self._overwrite else {}
        try:
            response = self._api.post(
                self._URL['upload_init'], data=payload, params=params
            )
            self._upload_id = response.json()['upload_id']
        except SbgError as e:
            # Mark the transfer failed before surfacing a clearer error.
            self._status = TransferState.FAILED
            raise SbgError(
                'Unable to initialize upload! Failed to get upload id! '
                'Reason: {}'.format(e.message)
            )
|
def function[_initialize_upload, parameter[self]]:
constant[
Initialized the upload on the API server by submitting the information
about the project, the file name, file size and the part size that is
going to be used during multipart upload.
]
variable[init_data] assign[=] dictionary[[<ast.Constant object at 0x7da20c992020>, <ast.Constant object at 0x7da20c990130>, <ast.Constant object at 0x7da20c992860>], [<ast.Attribute object at 0x7da20c9906a0>, <ast.Attribute object at 0x7da20c993370>, <ast.Attribute object at 0x7da20c9918a0>]]
if name[self]._project begin[:]
call[name[init_data]][constant[project]] assign[=] name[self]._project
variable[init_params] assign[=] dictionary[[], []]
if name[self]._overwrite begin[:]
call[name[init_params]][constant[overwrite]] assign[=] name[self]._overwrite
<ast.Try object at 0x7da2041d9d80>
|
keyword[def] identifier[_initialize_upload] ( identifier[self] ):
literal[string]
identifier[init_data] ={
literal[string] : identifier[self] . identifier[_file_name] ,
literal[string] : identifier[self] . identifier[_part_size] ,
literal[string] : identifier[self] . identifier[_file_size]
}
keyword[if] identifier[self] . identifier[_project] :
identifier[init_data] [ literal[string] ]= identifier[self] . identifier[_project]
keyword[elif] identifier[self] . identifier[_parent] :
identifier[init_data] [ literal[string] ]= identifier[self] . identifier[_parent]
identifier[init_params] ={}
keyword[if] identifier[self] . identifier[_overwrite] :
identifier[init_params] [ literal[string] ]= identifier[self] . identifier[_overwrite]
keyword[try] :
identifier[response] = identifier[self] . identifier[_api] . identifier[post] (
identifier[self] . identifier[_URL] [ literal[string] ], identifier[data] = identifier[init_data] , identifier[params] = identifier[init_params]
)
identifier[self] . identifier[_upload_id] = identifier[response] . identifier[json] ()[ literal[string] ]
keyword[except] identifier[SbgError] keyword[as] identifier[e] :
identifier[self] . identifier[_status] = identifier[TransferState] . identifier[FAILED]
keyword[raise] identifier[SbgError] (
literal[string]
literal[string] . identifier[format] ( identifier[e] . identifier[message] )
)
|
def _initialize_upload(self):
"""
Initialized the upload on the API server by submitting the information
about the project, the file name, file size and the part size that is
going to be used during multipart upload.
"""
init_data = {'name': self._file_name, 'part_size': self._part_size, 'size': self._file_size}
if self._project:
init_data['project'] = self._project # depends on [control=['if'], data=[]]
elif self._parent:
init_data['parent'] = self._parent # depends on [control=['if'], data=[]]
init_params = {}
if self._overwrite:
init_params['overwrite'] = self._overwrite # depends on [control=['if'], data=[]]
try:
response = self._api.post(self._URL['upload_init'], data=init_data, params=init_params)
self._upload_id = response.json()['upload_id'] # depends on [control=['try'], data=[]]
except SbgError as e:
self._status = TransferState.FAILED
raise SbgError('Unable to initialize upload! Failed to get upload id! Reason: {}'.format(e.message)) # depends on [control=['except'], data=['e']]
|
def reconfigure(self, service_id, workers):
        """Reconfigure a service registered in ServiceManager
        :param service_id: the service id
        :type service_id: uuid.uuid4
        :param workers: number of processes/workers for this service
        :type workers: int
        :raises: ValueError
        """
        if service_id not in self._services:
            raise ValueError("%s service id doesn't exists" % service_id)
        conf = self._services[service_id]
        # Allow shrinking down to one worker relative to the current count.
        _utils.check_workers(workers, minimum=(1 - conf.workers))
        conf.workers = workers
        # Reset forktimes to respawn services quickly
        self._forktimes = []
|
def function[reconfigure, parameter[self, service_id, workers]]:
constant[Reconfigure a service registered in ServiceManager
:param service_id: the service id
:type service_id: uuid.uuid4
:param workers: number of processes/workers for this service
:type workers: int
:raises: ValueError
]
<ast.Try object at 0x7da18dc9af50>
|
keyword[def] identifier[reconfigure] ( identifier[self] , identifier[service_id] , identifier[workers] ):
literal[string]
keyword[try] :
identifier[sc] = identifier[self] . identifier[_services] [ identifier[service_id] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[service_id] )
keyword[else] :
identifier[_utils] . identifier[check_workers] ( identifier[workers] , identifier[minimum] =( literal[int] - identifier[sc] . identifier[workers] ))
identifier[sc] . identifier[workers] = identifier[workers]
identifier[self] . identifier[_forktimes] =[]
|
def reconfigure(self, service_id, workers):
"""Reconfigure a service registered in ServiceManager
:param service_id: the service id
:type service_id: uuid.uuid4
:param workers: number of processes/workers for this service
:type workers: int
:raises: ValueError
"""
try:
sc = self._services[service_id] # depends on [control=['try'], data=[]]
except KeyError:
raise ValueError("%s service id doesn't exists" % service_id) # depends on [control=['except'], data=[]]
else:
_utils.check_workers(workers, minimum=1 - sc.workers)
sc.workers = workers
# Reset forktimes to respawn services quickly
self._forktimes = []
|
def _create_jspath(self) -> Path:
"""Create the source directory for the build."""
src = self._build_dir / 'bowtiejs'
os.makedirs(src, exist_ok=True)
return src
|
def function[_create_jspath, parameter[self]]:
constant[Create the source directory for the build.]
variable[src] assign[=] binary_operation[name[self]._build_dir / constant[bowtiejs]]
call[name[os].makedirs, parameter[name[src]]]
return[name[src]]
|
keyword[def] identifier[_create_jspath] ( identifier[self] )-> identifier[Path] :
literal[string]
identifier[src] = identifier[self] . identifier[_build_dir] / literal[string]
identifier[os] . identifier[makedirs] ( identifier[src] , identifier[exist_ok] = keyword[True] )
keyword[return] identifier[src]
|
def _create_jspath(self) -> Path:
"""Create the source directory for the build."""
src = self._build_dir / 'bowtiejs'
os.makedirs(src, exist_ok=True)
return src
|
def _first_word_not_cmd(self,
first_word: str,
command: str,
args: tuple,
kwargs: dict) -> None:
"""
check to see if this is an author or service.
This method does high level control handling
"""
if self.service_interface.is_service(first_word):
self._logger.debug(' first word is a service')
kwargs = self.service_interface.get_metadata(first_word, kwargs)
self._logger.debug(' service transform kwargs: %s', kwargs)
elif self.author_interface.is_author(first_word):
self._logger.debug(' first word is an author')
kwargs = self.author_interface.get_metadata(first_word, kwargs)
self._logger.debug(' author transform kwargs: %s', kwargs)
if not kwargs.get('remote'):
kwargs['remote_command'] = command
command= 'REMOTE'
self.messaging.send_command(command, *args, **kwargs)
return
else:
self.messaging.send_command(command, *args, **kwargs)
|
def function[_first_word_not_cmd, parameter[self, first_word, command, args, kwargs]]:
constant[
check to see if this is an author or service.
This method does high level control handling
]
if call[name[self].service_interface.is_service, parameter[name[first_word]]] begin[:]
call[name[self]._logger.debug, parameter[constant[ first word is a service]]]
variable[kwargs] assign[=] call[name[self].service_interface.get_metadata, parameter[name[first_word], name[kwargs]]]
call[name[self]._logger.debug, parameter[constant[ service transform kwargs: %s], name[kwargs]]]
if <ast.UnaryOp object at 0x7da204567520> begin[:]
call[name[kwargs]][constant[remote_command]] assign[=] name[command]
variable[command] assign[=] constant[REMOTE]
call[name[self].messaging.send_command, parameter[name[command], <ast.Starred object at 0x7da2045646d0>]]
return[None]
|
keyword[def] identifier[_first_word_not_cmd] ( identifier[self] ,
identifier[first_word] : identifier[str] ,
identifier[command] : identifier[str] ,
identifier[args] : identifier[tuple] ,
identifier[kwargs] : identifier[dict] )-> keyword[None] :
literal[string]
keyword[if] identifier[self] . identifier[service_interface] . identifier[is_service] ( identifier[first_word] ):
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] )
identifier[kwargs] = identifier[self] . identifier[service_interface] . identifier[get_metadata] ( identifier[first_word] , identifier[kwargs] )
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] , identifier[kwargs] )
keyword[elif] identifier[self] . identifier[author_interface] . identifier[is_author] ( identifier[first_word] ):
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] )
identifier[kwargs] = identifier[self] . identifier[author_interface] . identifier[get_metadata] ( identifier[first_word] , identifier[kwargs] )
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] , identifier[kwargs] )
keyword[if] keyword[not] identifier[kwargs] . identifier[get] ( literal[string] ):
identifier[kwargs] [ literal[string] ]= identifier[command]
identifier[command] = literal[string]
identifier[self] . identifier[messaging] . identifier[send_command] ( identifier[command] ,* identifier[args] ,** identifier[kwargs] )
keyword[return]
keyword[else] :
identifier[self] . identifier[messaging] . identifier[send_command] ( identifier[command] ,* identifier[args] ,** identifier[kwargs] )
|
def _first_word_not_cmd(self, first_word: str, command: str, args: tuple, kwargs: dict) -> None:
"""
check to see if this is an author or service.
This method does high level control handling
"""
if self.service_interface.is_service(first_word):
self._logger.debug(' first word is a service')
kwargs = self.service_interface.get_metadata(first_word, kwargs)
self._logger.debug(' service transform kwargs: %s', kwargs) # depends on [control=['if'], data=[]]
elif self.author_interface.is_author(first_word):
self._logger.debug(' first word is an author')
kwargs = self.author_interface.get_metadata(first_word, kwargs)
self._logger.debug(' author transform kwargs: %s', kwargs) # depends on [control=['if'], data=[]]
if not kwargs.get('remote'):
kwargs['remote_command'] = command
command = 'REMOTE'
self.messaging.send_command(command, *args, **kwargs)
return # depends on [control=['if'], data=[]]
else:
self.messaging.send_command(command, *args, **kwargs)
|
def _pop_digits(char_list):
    """Pop consecutive digits from the front of list and return them
    Pops any and all consecutive digits from the start of the provided
    character list and returns them as a list of string digits.
    Operates on (and possibly alters) the passed list.
    :param list char_list: a list of characters
    :return: a list of string digits
    :rtype: list
    """
    logger.debug('_pop_digits(%s)', char_list)
    digits = []
    # Truthiness is the idiomatic (and cheaper) emptiness test; the order
    # also guards the [0] access on an empty list.
    while char_list and char_list[0].isdigit():
        digits.append(char_list.pop(0))
    logger.debug('got digits: %s', digits)
    logger.debug('updated char list: %s', char_list)
    return digits
|
def function[_pop_digits, parameter[char_list]]:
constant[Pop consecutive digits from the front of list and return them
Pops any and all consecutive digits from the start of the provided
character list and returns them as a list of string digits.
Operates on (and possibly alters) the passed list.
:param list char_list: a list of characters
:return: a list of string digits
:rtype: list
]
call[name[logger].debug, parameter[constant[_pop_digits(%s)], name[char_list]]]
variable[digits] assign[=] list[[]]
while <ast.BoolOp object at 0x7da1b23b3850> begin[:]
call[name[digits].append, parameter[call[name[char_list].pop, parameter[constant[0]]]]]
call[name[logger].debug, parameter[constant[got digits: %s], name[digits]]]
call[name[logger].debug, parameter[constant[updated char list: %s], name[char_list]]]
return[name[digits]]
|
keyword[def] identifier[_pop_digits] ( identifier[char_list] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] , identifier[char_list] )
identifier[digits] =[]
keyword[while] identifier[len] ( identifier[char_list] )!= literal[int] keyword[and] identifier[char_list] [ literal[int] ]. identifier[isdigit] ():
identifier[digits] . identifier[append] ( identifier[char_list] . identifier[pop] ( literal[int] ))
identifier[logger] . identifier[debug] ( literal[string] , identifier[digits] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[char_list] )
keyword[return] identifier[digits]
|
def _pop_digits(char_list):
"""Pop consecutive digits from the front of list and return them
Pops any and all consecutive digits from the start of the provided
character list and returns them as a list of string digits.
Operates on (and possibly alters) the passed list.
:param list char_list: a list of characters
:return: a list of string digits
:rtype: list
"""
logger.debug('_pop_digits(%s)', char_list)
digits = []
while len(char_list) != 0 and char_list[0].isdigit():
digits.append(char_list.pop(0)) # depends on [control=['while'], data=[]]
logger.debug('got digits: %s', digits)
logger.debug('updated char list: %s', char_list)
return digits
|
def make_xpath_ranges(html, phrase):
    '''Given a HTML string and a `phrase`, build a regex to find offsets
    for the phrase, and then build a list of `XPathRange` objects for
    it.  If this fails, return empty list.
    '''
    if not html:
        return []
    if not isinstance(phrase, unicode):
        try:
            phrase = phrase.decode('utf8')
        except Exception:
            # Original used a bare ``except``, which also swallows
            # KeyboardInterrupt/SystemExit, and passed no argument for the
            # ``%r`` placeholder (a runtime logging formatting error).
            logger.info('failed %r.decode("utf8")', phrase, exc_info=True)
            return []
    phrase_re = re.compile(
        phrase, flags=re.UNICODE | re.IGNORECASE | re.MULTILINE)
    # a list of tuple(start, end) char indexes
    # NOTE(review): ``overlapped=`` is only supported by the third-party
    # ``regex`` module, not stdlib ``re`` -- presumably ``re`` here is that
    # module; verify the import.
    spans = [match.span()
             for match in phrase_re.finditer(html, overlapped=False)]
    # now run fancy aligner magic to get xpath info and format them as
    # XPathRange per above
    try:
        xpath_ranges = list(char_offsets_to_xpaths(html, spans))
    except Exception:
        logger.info('failed to get xpaths', exc_info=True)
        return []
    ranges = []
    for xpath_range in filter(None, xpath_ranges):
        ranges.append(dict(
            start=dict(node=xpath_range.start_xpath,
                       idx=xpath_range.start_offset),
            end=dict(node=xpath_range.end_xpath,
                     idx=xpath_range.end_offset)))
    return ranges
|
def function[make_xpath_ranges, parameter[html, phrase]]:
constant[Given a HTML string and a `phrase`, build a regex to find offsets
for the phrase, and then build a list of `XPathRange` objects for
it. If this fails, return empty list.
]
if <ast.UnaryOp object at 0x7da18dc9ab60> begin[:]
return[list[[]]]
if <ast.UnaryOp object at 0x7da18dc99450> begin[:]
<ast.Try object at 0x7da18dc991e0>
variable[phrase_re] assign[=] call[name[re].compile, parameter[name[phrase]]]
variable[spans] assign[=] list[[]]
for taget[name[match]] in starred[call[name[phrase_re].finditer, parameter[name[html]]]] begin[:]
call[name[spans].append, parameter[call[name[match].span, parameter[]]]]
<ast.Try object at 0x7da18dc9a740>
variable[ranges] assign[=] list[[]]
for taget[name[xpath_range]] in starred[call[name[filter], parameter[constant[None], name[xpath_ranges]]]] begin[:]
call[name[ranges].append, parameter[call[name[dict], parameter[]]]]
return[name[ranges]]
|
keyword[def] identifier[make_xpath_ranges] ( identifier[html] , identifier[phrase] ):
literal[string]
keyword[if] keyword[not] identifier[html] :
keyword[return] []
keyword[if] keyword[not] identifier[isinstance] ( identifier[phrase] , identifier[unicode] ):
keyword[try] :
identifier[phrase] = identifier[phrase] . identifier[decode] ( literal[string] )
keyword[except] :
identifier[logger] . identifier[info] ( literal[string] , identifier[exc_info] = keyword[True] )
keyword[return] []
identifier[phrase_re] = identifier[re] . identifier[compile] (
identifier[phrase] , identifier[flags] = identifier[re] . identifier[UNICODE] | identifier[re] . identifier[IGNORECASE] | identifier[re] . identifier[MULTILINE] )
identifier[spans] =[]
keyword[for] identifier[match] keyword[in] identifier[phrase_re] . identifier[finditer] ( identifier[html] , identifier[overlapped] = keyword[False] ):
identifier[spans] . identifier[append] ( identifier[match] . identifier[span] ())
keyword[try] :
identifier[xpath_ranges] = identifier[list] ( identifier[char_offsets_to_xpaths] ( identifier[html] , identifier[spans] ))
keyword[except] :
identifier[logger] . identifier[info] ( literal[string] , identifier[exc_info] = keyword[True] )
keyword[return] []
identifier[ranges] =[]
keyword[for] identifier[xpath_range] keyword[in] identifier[filter] ( keyword[None] , identifier[xpath_ranges] ):
identifier[ranges] . identifier[append] ( identifier[dict] (
identifier[start] = identifier[dict] ( identifier[node] = identifier[xpath_range] . identifier[start_xpath] ,
identifier[idx] = identifier[xpath_range] . identifier[start_offset] ),
identifier[end] = identifier[dict] ( identifier[node] = identifier[xpath_range] . identifier[end_xpath] ,
identifier[idx] = identifier[xpath_range] . identifier[end_offset] )))
keyword[return] identifier[ranges]
|
def make_xpath_ranges(html, phrase):
"""Given a HTML string and a `phrase`, build a regex to find offsets
for the phrase, and then build a list of `XPathRange` objects for
it. If this fails, return empty list.
"""
if not html:
return [] # depends on [control=['if'], data=[]]
if not isinstance(phrase, unicode):
try:
phrase = phrase.decode('utf8') # depends on [control=['try'], data=[]]
except:
logger.info('failed %r.decode("utf8")', exc_info=True)
return [] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
phrase_re = re.compile(phrase, flags=re.UNICODE | re.IGNORECASE | re.MULTILINE)
spans = []
for match in phrase_re.finditer(html, overlapped=False):
spans.append(match.span()) # a list of tuple(start, end) char indexes # depends on [control=['for'], data=['match']]
# now run fancy aligner magic to get xpath info and format them as
# XPathRange per above
try:
xpath_ranges = list(char_offsets_to_xpaths(html, spans)) # depends on [control=['try'], data=[]]
except:
logger.info('failed to get xpaths', exc_info=True)
return [] # depends on [control=['except'], data=[]]
ranges = []
for xpath_range in filter(None, xpath_ranges):
ranges.append(dict(start=dict(node=xpath_range.start_xpath, idx=xpath_range.start_offset), end=dict(node=xpath_range.end_xpath, idx=xpath_range.end_offset))) # depends on [control=['for'], data=['xpath_range']]
return ranges
|
def merged_type(t, s):
    # type: (AbstractType, AbstractType) -> Optional[AbstractType]
    """Return merged type if two items can be merged in to a different, more general type.

    Return None if merging is not possible.
    """
    t_is_tuple = isinstance(t, TupleType)
    s_is_tuple = isinstance(s, TupleType)
    if t_is_tuple and s_is_tuple:
        if len(t.items) == len(s.items):
            # Same arity: merge item-wise.
            merged_items = [combine_types([a, b])
                            for a, b in zip(t.items, s.items)]
            return TupleType(merged_items)
        combined = t.items + s.items
        if combined and all(item == combined[0] for item in combined[1:]):
            # Merge multiple compatible fixed-length tuples into a
            # variable-length tuple type.
            return ClassType('Tuple', [combined[0]])
        return None
    if (t_is_tuple and isinstance(s, ClassType) and s.name == 'Tuple'
            and len(s.args) == 1):
        if all(item == s.args[0] for item in t.items):
            # Merge fixed-length tuple and variable-length tuple.
            return s
        return None
    if s_is_tuple and isinstance(t, ClassType) and t.name == 'Tuple':
        return merged_type(s, t)
    if isinstance(s, NoReturnType):
        return t
    if isinstance(t, NoReturnType):
        return s
    if isinstance(s, AnyType):
        # This seems to be usually desirable, since Anys tend to come from
        # unknown types.
        return t
    if isinstance(t, AnyType):
        # Similar to above.
        return s
    return None
|
def function[merged_type, parameter[t, s]]:
constant[Return merged type if two items can be merged in to a different, more general type.
Return None if merging is not possible.
]
if <ast.BoolOp object at 0x7da2047e8490> begin[:]
if compare[call[name[len], parameter[name[t].items]] equal[==] call[name[len], parameter[name[s].items]]] begin[:]
return[call[name[TupleType], parameter[<ast.ListComp object at 0x7da1b120a260>]]]
variable[all_items] assign[=] binary_operation[name[t].items + name[s].items]
if <ast.BoolOp object at 0x7da1b1209600> begin[:]
return[call[name[ClassType], parameter[constant[Tuple], list[[<ast.Subscript object at 0x7da1b12090f0>]]]]]
return[constant[None]]
|
keyword[def] identifier[merged_type] ( identifier[t] , identifier[s] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[t] , identifier[TupleType] ) keyword[and] identifier[isinstance] ( identifier[s] , identifier[TupleType] ):
keyword[if] identifier[len] ( identifier[t] . identifier[items] )== identifier[len] ( identifier[s] . identifier[items] ):
keyword[return] identifier[TupleType] ([ identifier[combine_types] ([ identifier[ti] , identifier[si] ]) keyword[for] identifier[ti] , identifier[si] keyword[in] identifier[zip] ( identifier[t] . identifier[items] , identifier[s] . identifier[items] )])
identifier[all_items] = identifier[t] . identifier[items] + identifier[s] . identifier[items]
keyword[if] identifier[all_items] keyword[and] identifier[all] ( identifier[item] == identifier[all_items] [ literal[int] ] keyword[for] identifier[item] keyword[in] identifier[all_items] [ literal[int] :]):
keyword[return] identifier[ClassType] ( literal[string] ,[ identifier[all_items] [ literal[int] ]])
keyword[elif] ( identifier[isinstance] ( identifier[t] , identifier[TupleType] ) keyword[and] identifier[isinstance] ( identifier[s] , identifier[ClassType] ) keyword[and] identifier[s] . identifier[name] == literal[string]
keyword[and] identifier[len] ( identifier[s] . identifier[args] )== literal[int] ):
keyword[if] identifier[all] ( identifier[item] == identifier[s] . identifier[args] [ literal[int] ] keyword[for] identifier[item] keyword[in] identifier[t] . identifier[items] ):
keyword[return] identifier[s]
keyword[elif] identifier[isinstance] ( identifier[s] , identifier[TupleType] ) keyword[and] identifier[isinstance] ( identifier[t] , identifier[ClassType] ) keyword[and] identifier[t] . identifier[name] == literal[string] :
keyword[return] identifier[merged_type] ( identifier[s] , identifier[t] )
keyword[elif] identifier[isinstance] ( identifier[s] , identifier[NoReturnType] ):
keyword[return] identifier[t]
keyword[elif] identifier[isinstance] ( identifier[t] , identifier[NoReturnType] ):
keyword[return] identifier[s]
keyword[elif] identifier[isinstance] ( identifier[s] , identifier[AnyType] ):
keyword[return] identifier[t]
keyword[elif] identifier[isinstance] ( identifier[t] , identifier[AnyType] ):
keyword[return] identifier[s]
keyword[return] keyword[None]
|
def merged_type(t, s):
# type: (AbstractType, AbstractType) -> Optional[AbstractType]
'Return merged type if two items can be merged in to a different, more general type.\n\n Return None if merging is not possible.\n '
if isinstance(t, TupleType) and isinstance(s, TupleType):
if len(t.items) == len(s.items):
return TupleType([combine_types([ti, si]) for (ti, si) in zip(t.items, s.items)]) # depends on [control=['if'], data=[]]
all_items = t.items + s.items
if all_items and all((item == all_items[0] for item in all_items[1:])):
# Merge multiple compatible fixed-length tuples into a variable-length tuple type.
return ClassType('Tuple', [all_items[0]]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(t, TupleType) and isinstance(s, ClassType) and (s.name == 'Tuple') and (len(s.args) == 1):
if all((item == s.args[0] for item in t.items)):
# Merge fixed-length tuple and variable-length tuple.
return s # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(s, TupleType) and isinstance(t, ClassType) and (t.name == 'Tuple'):
return merged_type(s, t) # depends on [control=['if'], data=[]]
elif isinstance(s, NoReturnType):
return t # depends on [control=['if'], data=[]]
elif isinstance(t, NoReturnType):
return s # depends on [control=['if'], data=[]]
elif isinstance(s, AnyType):
# This seems to be usually desirable, since Anys tend to come from unknown types.
return t # depends on [control=['if'], data=[]]
elif isinstance(t, AnyType):
# Similar to above.
return s # depends on [control=['if'], data=[]]
return None
|
def count_dimensions(entry):
    """Counts the number of dimensions from a nested list of dimension assignments
    that may include function calls.

    Non-string elements contribute nothing; each string contributes one
    dimension per comma-separated piece after leading/trailing commas are
    stripped.
    """
    total = 0
    for item in entry:
        if not isinstance(item, str):
            continue
        pieces = item.strip(",").split(",")
        # A string that is empty after stripping commas yields [''] and
        # counts as zero dimensions.
        if pieces != [""]:
            total += len(pieces)
    return total
|
def function[count_dimensions, parameter[entry]]:
constant[Counts the number of dimensions from a nested list of dimension assignments
that may include function calls.
]
variable[result] assign[=] constant[0]
for taget[name[e]] in starred[name[entry]] begin[:]
if call[name[isinstance], parameter[name[e], name[str]]] begin[:]
variable[sliced] assign[=] call[call[name[e].strip, parameter[constant[,]]].split, parameter[constant[,]]]
<ast.AugAssign object at 0x7da20e954190>
return[name[result]]
|
keyword[def] identifier[count_dimensions] ( identifier[entry] ):
literal[string]
identifier[result] = literal[int]
keyword[for] identifier[e] keyword[in] identifier[entry] :
keyword[if] identifier[isinstance] ( identifier[e] , identifier[str] ):
identifier[sliced] = identifier[e] . identifier[strip] ( literal[string] ). identifier[split] ( literal[string] )
identifier[result] += literal[int] keyword[if] identifier[len] ( identifier[sliced] )== literal[int] keyword[and] identifier[sliced] [ literal[int] ]== literal[string] keyword[else] identifier[len] ( identifier[sliced] )
keyword[return] identifier[result]
|
def count_dimensions(entry):
"""Counts the number of dimensions from a nested list of dimension assignments
that may include function calls.
"""
result = 0
for e in entry:
if isinstance(e, str):
sliced = e.strip(',').split(',')
result += 0 if len(sliced) == 1 and sliced[0] == '' else len(sliced) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['e']]
return result
|
async def get_sound_settings(self, target="") -> List[Setting]:
    """Get the current sound settings.

    :param str target: settings target, defaults to all.
    """
    getter = self.services["audio"]["getSoundSettings"]
    raw_settings = await getter({"target": target})
    settings = []
    for entry in raw_settings:
        settings.append(Setting.make(**entry))
    return settings
|
<ast.AsyncFunctionDef object at 0x7da18f00d6f0>
|
keyword[async] keyword[def] identifier[get_sound_settings] ( identifier[self] , identifier[target] = literal[string] )-> identifier[List] [ identifier[Setting] ]:
literal[string]
identifier[res] = keyword[await] identifier[self] . identifier[services] [ literal[string] ][ literal[string] ]({ literal[string] : identifier[target] })
keyword[return] [ identifier[Setting] . identifier[make] (** identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[res] ]
|
async def get_sound_settings(self, target='') -> List[Setting]:
"""Get the current sound settings.
:param str target: settings target, defaults to all.
"""
res = await self.services['audio']['getSoundSettings']({'target': target})
return [Setting.make(**x) for x in res]
|
def binom(n, k):
    """
    Returns binomial coefficient (n choose k).

    Uses the incremental multiply/divide scheme from
    http://blog.plover.com/math/choose.html, kept in exact integer
    arithmetic so large inputs do not lose precision.

    :param int n: number of items
    :param int k: number of items to choose
    :returns: C(n, k) as an exact int; 0 when k > n
    """
    if k > n:
        return 0
    if k == 0:
        return 1
    result = 1
    for denom in range(1, k + 1):
        # Multiply first, then floor-divide: the running product of
        # `denom` consecutive integers is divisible by denom!, so `//`
        # is exact.  The original used true division (`/=`), which in
        # Python 3 produced a float and lost precision for large n.
        result = result * n // denom
        n -= 1
    return result
|
def function[binom, parameter[n, k]]:
constant[
Returns binomial coefficient (n choose k).
]
if compare[name[k] greater[>] name[n]] begin[:]
return[constant[0]]
if compare[name[k] equal[==] constant[0]] begin[:]
return[constant[1]]
variable[result] assign[=] constant[1]
for taget[name[denom]] in starred[call[name[range], parameter[constant[1], binary_operation[name[k] + constant[1]]]]] begin[:]
<ast.AugAssign object at 0x7da1b0aedff0>
<ast.AugAssign object at 0x7da1b0aec130>
<ast.AugAssign object at 0x7da1b0aeea10>
return[name[result]]
|
keyword[def] identifier[binom] ( identifier[n] , identifier[k] ):
literal[string]
keyword[if] identifier[k] > identifier[n] :
keyword[return] literal[int]
keyword[if] identifier[k] == literal[int] :
keyword[return] literal[int]
identifier[result] = literal[int]
keyword[for] identifier[denom] keyword[in] identifier[range] ( literal[int] , identifier[k] + literal[int] ):
identifier[result] *= identifier[n]
identifier[result] /= identifier[denom]
identifier[n] -= literal[int]
keyword[return] identifier[result]
|
def binom(n, k):
"""
Returns binomial coefficient (n choose k).
"""
# http://blog.plover.com/math/choose.html
if k > n:
return 0 # depends on [control=['if'], data=[]]
if k == 0:
return 1 # depends on [control=['if'], data=[]]
result = 1
for denom in range(1, k + 1):
result *= n
result /= denom
n -= 1 # depends on [control=['for'], data=['denom']]
return result
|
def serialize(self):
    """
    Serializes into a bytestring.

    :returns: An object of type Bytes.
    """
    state = self.__getstate__()
    # The 'seg' payload is pickled separately, then wrapped together with
    # the name in an outer pickle (protocol -1 = highest available).
    inner = pickle.dumps(state['seg'], protocol=-1)
    payload = {'name': state['name'], 'seg': inner}
    return pickle.dumps(payload, protocol=-1)
|
def function[serialize, parameter[self]]:
constant[
Serializes into a bytestring.
:returns: An object of type Bytes.
]
variable[d] assign[=] call[name[self].__getstate__, parameter[]]
return[call[name[pickle].dumps, parameter[dictionary[[<ast.Constant object at 0x7da1b0550160>, <ast.Constant object at 0x7da1b05506d0>], [<ast.Subscript object at 0x7da1b05520b0>, <ast.Call object at 0x7da1b0550b50>]]]]]
|
keyword[def] identifier[serialize] ( identifier[self] ):
literal[string]
identifier[d] = identifier[self] . identifier[__getstate__] ()
keyword[return] identifier[pickle] . identifier[dumps] ({
literal[string] : identifier[d] [ literal[string] ],
literal[string] : identifier[pickle] . identifier[dumps] ( identifier[d] [ literal[string] ], identifier[protocol] =- literal[int] ),
}, identifier[protocol] =- literal[int] )
|
def serialize(self):
"""
Serializes into a bytestring.
:returns: An object of type Bytes.
"""
d = self.__getstate__()
return pickle.dumps({'name': d['name'], 'seg': pickle.dumps(d['seg'], protocol=-1)}, protocol=-1)
|
def css(self, css):
    """Find other nodes by a CSS selector relative to the current node.

    :param css: CSS selector string evaluated relative to this node.
    :returns: list of node objects, one per non-empty id returned by
        the driver (empty ids are skipped).
    """
    # Comma-separated id string from the driver; empty entries mean no match.
    node_ids = self._get_css_ids(css).split(",")
    # Hoisted out of the comprehension: the original re-created the
    # factory once per node id, which is loop-invariant work.
    factory = self.get_node_factory()
    return [factory.create(node_id) for node_id in node_ids if node_id]
|
def function[css, parameter[self, css]]:
constant[ Finds another node by a CSS selector relative to the current node. ]
return[<ast.ListComp object at 0x7da20c7ca620>]
|
keyword[def] identifier[css] ( identifier[self] , identifier[css] ):
literal[string]
keyword[return] [ identifier[self] . identifier[get_node_factory] (). identifier[create] ( identifier[node_id] )
keyword[for] identifier[node_id] keyword[in] identifier[self] . identifier[_get_css_ids] ( identifier[css] ). identifier[split] ( literal[string] )
keyword[if] identifier[node_id] ]
|
def css(self, css):
""" Finds another node by a CSS selector relative to the current node. """
return [self.get_node_factory().create(node_id) for node_id in self._get_css_ids(css).split(',') if node_id]
|
def open_target_group_for_form(self, form):
    """
    Makes sure that the first group that should be open is open.

    This is either the first group with errors or the first group
    in the container, unless that first group was originally set to
    active=False.
    """
    target = self.first_container_with_errors(form.errors.keys())
    if target is not None:
        # A group with errors always gets opened.
        target.active = True
        return target

    # No errors anywhere: fall back to the first group.
    target = self.fields[0]
    if getattr(target, '_active_originally_included', None):
        # The group carried an explicit original active setting; leave
        # its state untouched.
        return target
    target.active = True
    return target
|
def function[open_target_group_for_form, parameter[self, form]]:
constant[
Makes sure that the first group that should be open is open.
This is either the first group with errors or the first group
in the container, unless that first group was originally set to
active=False.
]
variable[target] assign[=] call[name[self].first_container_with_errors, parameter[call[name[form].errors.keys, parameter[]]]]
if compare[name[target] is constant[None]] begin[:]
variable[target] assign[=] call[name[self].fields][constant[0]]
if <ast.UnaryOp object at 0x7da20c6ab100> begin[:]
name[target].active assign[=] constant[True]
return[name[target]]
name[target].active assign[=] constant[True]
return[name[target]]
|
keyword[def] identifier[open_target_group_for_form] ( identifier[self] , identifier[form] ):
literal[string]
identifier[target] = identifier[self] . identifier[first_container_with_errors] ( identifier[form] . identifier[errors] . identifier[keys] ())
keyword[if] identifier[target] keyword[is] keyword[None] :
identifier[target] = identifier[self] . identifier[fields] [ literal[int] ]
keyword[if] keyword[not] identifier[getattr] ( identifier[target] , literal[string] , keyword[None] ):
identifier[target] . identifier[active] = keyword[True]
keyword[return] identifier[target]
identifier[target] . identifier[active] = keyword[True]
keyword[return] identifier[target]
|
def open_target_group_for_form(self, form):
"""
Makes sure that the first group that should be open is open.
This is either the first group with errors or the first group
in the container, unless that first group was originally set to
active=False.
"""
target = self.first_container_with_errors(form.errors.keys())
if target is None:
target = self.fields[0]
if not getattr(target, '_active_originally_included', None):
target.active = True # depends on [control=['if'], data=[]]
return target # depends on [control=['if'], data=['target']]
target.active = True
return target
|
def connect(self, uuid_value, wait=None):
    """Connect to a specific device by its uuid

    Attempt to connect to a device that we have previously scanned using its UUID.
    If wait is not None, then it is used in the same way as scan(wait) to override
    default wait times with an explicit value.

    Args:
        uuid_value (int): The unique id of the device that we would like to connect to.
        wait (float): Optional amount of time to force the device adapter to wait before
            attempting to connect.
    """
    if self.connected:
        raise HardwareError("Cannot connect when we are already connected")

    # Trigger a fresh scan if we have not seen this device yet.
    if uuid_value not in self._scanned_devices:
        self.scan(wait=wait)

    # Look the device up under the scan lock so a concurrent scan cannot
    # mutate the table while we read the connection string.
    with self._scan_lock:
        if uuid_value not in self._scanned_devices:
            raise HardwareError("Could not find device to connect to by UUID", uuid=uuid_value)

        device_record = self._scanned_devices[uuid_value]
        connstring = device_record['connection_string']

    self.connect_direct(connstring)
|
def function[connect, parameter[self, uuid_value, wait]]:
constant[Connect to a specific device by its uuid
Attempt to connect to a device that we have previously scanned using its UUID.
If wait is not None, then it is used in the same was a scan(wait) to override
default wait times with an explicit value.
Args:
uuid_value (int): The unique id of the device that we would like to connect to.
wait (float): Optional amount of time to force the device adapter to wait before
attempting to connect.
]
if name[self].connected begin[:]
<ast.Raise object at 0x7da20c76e1d0>
if compare[name[uuid_value] <ast.NotIn object at 0x7da2590d7190> name[self]._scanned_devices] begin[:]
call[name[self].scan, parameter[]]
with name[self]._scan_lock begin[:]
if compare[name[uuid_value] <ast.NotIn object at 0x7da2590d7190> name[self]._scanned_devices] begin[:]
<ast.Raise object at 0x7da20c76fb80>
variable[connstring] assign[=] call[call[name[self]._scanned_devices][name[uuid_value]]][constant[connection_string]]
call[name[self].connect_direct, parameter[name[connstring]]]
|
keyword[def] identifier[connect] ( identifier[self] , identifier[uuid_value] , identifier[wait] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[connected] :
keyword[raise] identifier[HardwareError] ( literal[string] )
keyword[if] identifier[uuid_value] keyword[not] keyword[in] identifier[self] . identifier[_scanned_devices] :
identifier[self] . identifier[scan] ( identifier[wait] = identifier[wait] )
keyword[with] identifier[self] . identifier[_scan_lock] :
keyword[if] identifier[uuid_value] keyword[not] keyword[in] identifier[self] . identifier[_scanned_devices] :
keyword[raise] identifier[HardwareError] ( literal[string] , identifier[uuid] = identifier[uuid_value] )
identifier[connstring] = identifier[self] . identifier[_scanned_devices] [ identifier[uuid_value] ][ literal[string] ]
identifier[self] . identifier[connect_direct] ( identifier[connstring] )
|
def connect(self, uuid_value, wait=None):
"""Connect to a specific device by its uuid
Attempt to connect to a device that we have previously scanned using its UUID.
If wait is not None, then it is used in the same was a scan(wait) to override
default wait times with an explicit value.
Args:
uuid_value (int): The unique id of the device that we would like to connect to.
wait (float): Optional amount of time to force the device adapter to wait before
attempting to connect.
"""
if self.connected:
raise HardwareError('Cannot connect when we are already connected') # depends on [control=['if'], data=[]]
if uuid_value not in self._scanned_devices:
self.scan(wait=wait) # depends on [control=['if'], data=[]]
with self._scan_lock:
if uuid_value not in self._scanned_devices:
raise HardwareError('Could not find device to connect to by UUID', uuid=uuid_value) # depends on [control=['if'], data=['uuid_value']]
connstring = self._scanned_devices[uuid_value]['connection_string'] # depends on [control=['with'], data=[]]
self.connect_direct(connstring)
|
def parse_machine_listing(text: str, convert: bool=True, strict: bool=True) -> \
        List[dict]:
    '''Parse machine listing.

    Args:
        text: The listing.
        convert: Convert sizes and dates.
        strict: Method of handling errors. ``True`` will raise
            ``ValueError``. ``False`` will ignore rows with errors.

    Returns:
        list: A list of dict of the facts defined in RFC 3659.

        The key names must be lowercase. The filename uses the key
        ``name``.
    '''
    # TODO: this function should be moved into the 'ls' package
    rows = []
    for line in text.splitlines(False):
        entry = {}
        filename = None
        for fact in line.split(';'):
            key, sep, raw_value = fact.partition('=')
            if not sep:
                if key.startswith(' '):
                    # A fact with no '=' beginning with a space carries
                    # the filename.
                    filename = key[1:]
                else:
                    entry[key.strip().lower()] = ''
                continue
            key = key.strip().lower()
            raw_value = raw_value.strip().lower()
            if convert:
                try:
                    raw_value = convert_machine_list_value(key, raw_value)
                except ValueError:
                    if strict:
                        raise
            entry[key] = raw_value
        if filename:
            entry['name'] = filename
            rows.append(entry)
        elif strict:
            raise ValueError('Missing filename.')
    return rows
|
def function[parse_machine_listing, parameter[text, convert, strict]]:
constant[Parse machine listing.
Args:
text: The listing.
convert: Convert sizes and dates.
strict: Method of handling errors. ``True`` will raise
``ValueError``. ``False`` will ignore rows with errors.
Returns:
list: A list of dict of the facts defined in RFC 3659.
The key names must be lowercase. The filename uses the key
``name``.
]
variable[listing] assign[=] list[[]]
for taget[name[line]] in starred[call[name[text].splitlines, parameter[constant[False]]]] begin[:]
variable[facts] assign[=] call[name[line].split, parameter[constant[;]]]
variable[row] assign[=] dictionary[[], []]
variable[filename] assign[=] constant[None]
for taget[name[fact]] in starred[name[facts]] begin[:]
<ast.Tuple object at 0x7da18f09d420> assign[=] call[name[fact].partition, parameter[constant[=]]]
if name[sep] begin[:]
variable[name] assign[=] call[call[name[name].strip, parameter[]].lower, parameter[]]
variable[value] assign[=] call[call[name[value].strip, parameter[]].lower, parameter[]]
if name[convert] begin[:]
<ast.Try object at 0x7da18f09d8a0>
call[name[row]][name[name]] assign[=] name[value]
if name[filename] begin[:]
call[name[row]][constant[name]] assign[=] name[filename]
call[name[listing].append, parameter[name[row]]]
return[name[listing]]
|
keyword[def] identifier[parse_machine_listing] ( identifier[text] : identifier[str] , identifier[convert] : identifier[bool] = keyword[True] , identifier[strict] : identifier[bool] = keyword[True] )-> identifier[List] [ identifier[dict] ]:
literal[string]
identifier[listing] =[]
keyword[for] identifier[line] keyword[in] identifier[text] . identifier[splitlines] ( keyword[False] ):
identifier[facts] = identifier[line] . identifier[split] ( literal[string] )
identifier[row] ={}
identifier[filename] = keyword[None]
keyword[for] identifier[fact] keyword[in] identifier[facts] :
identifier[name] , identifier[sep] , identifier[value] = identifier[fact] . identifier[partition] ( literal[string] )
keyword[if] identifier[sep] :
identifier[name] = identifier[name] . identifier[strip] (). identifier[lower] ()
identifier[value] = identifier[value] . identifier[strip] (). identifier[lower] ()
keyword[if] identifier[convert] :
keyword[try] :
identifier[value] = identifier[convert_machine_list_value] ( identifier[name] , identifier[value] )
keyword[except] identifier[ValueError] :
keyword[if] identifier[strict] :
keyword[raise]
identifier[row] [ identifier[name] ]= identifier[value]
keyword[else] :
keyword[if] identifier[name] [ literal[int] : literal[int] ]== literal[string] :
identifier[filename] = identifier[name] [ literal[int] :]
keyword[else] :
identifier[name] = identifier[name] . identifier[strip] (). identifier[lower] ()
identifier[row] [ identifier[name] ]= literal[string]
keyword[if] identifier[filename] :
identifier[row] [ literal[string] ]= identifier[filename]
identifier[listing] . identifier[append] ( identifier[row] )
keyword[elif] identifier[strict] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[listing]
|
def parse_machine_listing(text: str, convert: bool=True, strict: bool=True) -> List[dict]:
"""Parse machine listing.
Args:
text: The listing.
convert: Convert sizes and dates.
strict: Method of handling errors. ``True`` will raise
``ValueError``. ``False`` will ignore rows with errors.
Returns:
list: A list of dict of the facts defined in RFC 3659.
The key names must be lowercase. The filename uses the key
``name``.
"""
# TODO: this function should be moved into the 'ls' package
listing = []
for line in text.splitlines(False):
facts = line.split(';')
row = {}
filename = None
for fact in facts:
(name, sep, value) = fact.partition('=')
if sep:
name = name.strip().lower()
value = value.strip().lower()
if convert:
try:
value = convert_machine_list_value(name, value) # depends on [control=['try'], data=[]]
except ValueError:
if strict:
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
row[name] = value # depends on [control=['if'], data=[]]
elif name[0:1] == ' ':
# Is a filename
filename = name[1:] # depends on [control=['if'], data=[]]
else:
name = name.strip().lower()
row[name] = '' # depends on [control=['for'], data=['fact']]
if filename:
row['name'] = filename
listing.append(row) # depends on [control=['if'], data=[]]
elif strict:
raise ValueError('Missing filename.') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
return listing
|
def _get_asset_content(self, asset_content_id):
"""stub"""
asset_content = None
for asset in self._asset_lookup_session.get_assets():
for content in asset.get_asset_contents():
if content.get_id() == asset_content_id:
asset_content = content
break
if asset_content is not None:
break
if asset_content is None:
raise NotFound('THe AWS Adapter could not find AssetContent ' +
str(asset_content_id))
return asset_content
|
def function[_get_asset_content, parameter[self, asset_content_id]]:
constant[stub]
variable[asset_content] assign[=] constant[None]
for taget[name[asset]] in starred[call[name[self]._asset_lookup_session.get_assets, parameter[]]] begin[:]
for taget[name[content]] in starred[call[name[asset].get_asset_contents, parameter[]]] begin[:]
if compare[call[name[content].get_id, parameter[]] equal[==] name[asset_content_id]] begin[:]
variable[asset_content] assign[=] name[content]
break
if compare[name[asset_content] is_not constant[None]] begin[:]
break
if compare[name[asset_content] is constant[None]] begin[:]
<ast.Raise object at 0x7da20c795ff0>
return[name[asset_content]]
|
keyword[def] identifier[_get_asset_content] ( identifier[self] , identifier[asset_content_id] ):
literal[string]
identifier[asset_content] = keyword[None]
keyword[for] identifier[asset] keyword[in] identifier[self] . identifier[_asset_lookup_session] . identifier[get_assets] ():
keyword[for] identifier[content] keyword[in] identifier[asset] . identifier[get_asset_contents] ():
keyword[if] identifier[content] . identifier[get_id] ()== identifier[asset_content_id] :
identifier[asset_content] = identifier[content]
keyword[break]
keyword[if] identifier[asset_content] keyword[is] keyword[not] keyword[None] :
keyword[break]
keyword[if] identifier[asset_content] keyword[is] keyword[None] :
keyword[raise] identifier[NotFound] ( literal[string] +
identifier[str] ( identifier[asset_content_id] ))
keyword[return] identifier[asset_content]
|
def _get_asset_content(self, asset_content_id):
"""stub"""
asset_content = None
for asset in self._asset_lookup_session.get_assets():
for content in asset.get_asset_contents():
if content.get_id() == asset_content_id:
asset_content = content
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['content']]
if asset_content is not None:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['asset']]
if asset_content is None:
raise NotFound('THe AWS Adapter could not find AssetContent ' + str(asset_content_id)) # depends on [control=['if'], data=[]]
return asset_content
|
def _merge_entity(entity, if_match, require_encryption=False, key_encryption_key=None):
    '''
    Builds the HTTP request used to merge an entity into a table.

    :param entity: the entity to merge; validated before use.
    :param if_match: value for the ``If-Match`` header; must not be None.
    :param require_encryption: passed to the encryption-unsupported check.
    :param key_encryption_key: passed to the encryption-unsupported check.
    :return: a populated ``HTTPRequest`` with method ``MERGE``.
    '''
    # Validate inputs first, in the same order as before, so callers see
    # identical errors for identical bad input.
    _validate_not_none('if_match', if_match)
    _validate_entity(entity)
    _validate_encryption_unsupported(require_encryption, key_encryption_key)

    merge_request = HTTPRequest()
    merge_request.method = 'MERGE'

    headers = {'If-Match': _to_str(if_match)}
    headers[_DEFAULT_CONTENT_TYPE_HEADER[0]] = _DEFAULT_CONTENT_TYPE_HEADER[1]
    headers[_DEFAULT_ACCEPT_HEADER[0]] = _DEFAULT_ACCEPT_HEADER[1]
    merge_request.headers = headers

    merge_request.body = _get_request_body(_convert_entity_to_json(entity))
    return merge_request
|
def function[_merge_entity, parameter[entity, if_match, require_encryption, key_encryption_key]]:
constant[
Constructs a merge entity request.
]
call[name[_validate_not_none], parameter[constant[if_match], name[if_match]]]
call[name[_validate_entity], parameter[name[entity]]]
call[name[_validate_encryption_unsupported], parameter[name[require_encryption], name[key_encryption_key]]]
variable[request] assign[=] call[name[HTTPRequest], parameter[]]
name[request].method assign[=] constant[MERGE]
name[request].headers assign[=] dictionary[[<ast.Subscript object at 0x7da2054a6380>, <ast.Subscript object at 0x7da2054a4310>, <ast.Constant object at 0x7da2054a5b40>], [<ast.Subscript object at 0x7da2054a6bf0>, <ast.Subscript object at 0x7da2054a4190>, <ast.Call object at 0x7da2054a52a0>]]
name[request].body assign[=] call[name[_get_request_body], parameter[call[name[_convert_entity_to_json], parameter[name[entity]]]]]
return[name[request]]
|
keyword[def] identifier[_merge_entity] ( identifier[entity] , identifier[if_match] , identifier[require_encryption] = keyword[False] , identifier[key_encryption_key] = keyword[None] ):
literal[string]
identifier[_validate_not_none] ( literal[string] , identifier[if_match] )
identifier[_validate_entity] ( identifier[entity] )
identifier[_validate_encryption_unsupported] ( identifier[require_encryption] , identifier[key_encryption_key] )
identifier[request] = identifier[HTTPRequest] ()
identifier[request] . identifier[method] = literal[string]
identifier[request] . identifier[headers] ={
identifier[_DEFAULT_CONTENT_TYPE_HEADER] [ literal[int] ]: identifier[_DEFAULT_CONTENT_TYPE_HEADER] [ literal[int] ],
identifier[_DEFAULT_ACCEPT_HEADER] [ literal[int] ]: identifier[_DEFAULT_ACCEPT_HEADER] [ literal[int] ],
literal[string] : identifier[_to_str] ( identifier[if_match] )
}
identifier[request] . identifier[body] = identifier[_get_request_body] ( identifier[_convert_entity_to_json] ( identifier[entity] ))
keyword[return] identifier[request]
|
def _merge_entity(entity, if_match, require_encryption=False, key_encryption_key=None):
"""
Constructs a merge entity request.
"""
_validate_not_none('if_match', if_match)
_validate_entity(entity)
_validate_encryption_unsupported(require_encryption, key_encryption_key)
request = HTTPRequest()
request.method = 'MERGE'
request.headers = {_DEFAULT_CONTENT_TYPE_HEADER[0]: _DEFAULT_CONTENT_TYPE_HEADER[1], _DEFAULT_ACCEPT_HEADER[0]: _DEFAULT_ACCEPT_HEADER[1], 'If-Match': _to_str(if_match)}
request.body = _get_request_body(_convert_entity_to_json(entity))
return request
|
def genCubeVector(x, y, z, x_mult=1, y_mult=1, z_mult=1):
    """Generates a map of vector lengths from the center point to each coordinate
    x - width of matrix to generate
    y - height of matrix to generate
    z - depth of matrix to generate
    x_mult - value to scale x-axis by
    y_mult - value to scale y-axis by
    z_mult - value to scale z-axis by
    """
    # Geometric center of the cube (can fall between grid points).
    center_x = (x - 1) / 2.0
    center_y = (y - 1) / 2.0
    center_z = (z - 1) / 2.0

    def length(px, py, pz):
        # Truncated Euclidean-style distance; the *_mult values scale the
        # exponent on each axis term, exactly as in the original.
        total = (math.pow(px - center_x, 2 * x_mult)
                 + math.pow(py - center_y, 2 * y_mult)
                 + math.pow(pz - center_z, 2 * z_mult))
        return int(math.sqrt(total))

    cube = []
    for px in range(x):
        plane = []
        for py in range(y):
            plane.append([length(px, py, pz) for pz in range(z)])
        cube.append(plane)
    return cube
|
def function[genCubeVector, parameter[x, y, z, x_mult, y_mult, z_mult]]:
constant[Generates a map of vector lengths from the center point to each coordinate
x - width of matrix to generate
y - height of matrix to generate
z - depth of matrix to generate
x_mult - value to scale x-axis by
y_mult - value to scale y-axis by
z_mult - value to scale z-axis by
]
variable[cX] assign[=] binary_operation[binary_operation[name[x] - constant[1]] / constant[2.0]]
variable[cY] assign[=] binary_operation[binary_operation[name[y] - constant[1]] / constant[2.0]]
variable[cZ] assign[=] binary_operation[binary_operation[name[z] - constant[1]] / constant[2.0]]
def function[vect, parameter[_x, _y, _z]]:
return[call[name[int], parameter[call[name[math].sqrt, parameter[binary_operation[binary_operation[call[name[math].pow, parameter[binary_operation[name[_x] - name[cX]], binary_operation[constant[2] * name[x_mult]]]] + call[name[math].pow, parameter[binary_operation[name[_y] - name[cY]], binary_operation[constant[2] * name[y_mult]]]]] + call[name[math].pow, parameter[binary_operation[name[_z] - name[cZ]], binary_operation[constant[2] * name[z_mult]]]]]]]]]]
return[<ast.ListComp object at 0x7da18f7213f0>]
|
keyword[def] identifier[genCubeVector] ( identifier[x] , identifier[y] , identifier[z] , identifier[x_mult] = literal[int] , identifier[y_mult] = literal[int] , identifier[z_mult] = literal[int] ):
literal[string]
identifier[cX] =( identifier[x] - literal[int] )/ literal[int]
identifier[cY] =( identifier[y] - literal[int] )/ literal[int]
identifier[cZ] =( identifier[z] - literal[int] )/ literal[int]
keyword[def] identifier[vect] ( identifier[_x] , identifier[_y] , identifier[_z] ):
keyword[return] identifier[int] ( identifier[math] . identifier[sqrt] ( identifier[math] . identifier[pow] ( identifier[_x] - identifier[cX] , literal[int] * identifier[x_mult] )+
identifier[math] . identifier[pow] ( identifier[_y] - identifier[cY] , literal[int] * identifier[y_mult] )+
identifier[math] . identifier[pow] ( identifier[_z] - identifier[cZ] , literal[int] * identifier[z_mult] )))
keyword[return] [[[ identifier[vect] ( identifier[_x] , identifier[_y] , identifier[_z] ) keyword[for] identifier[_z] keyword[in] identifier[range] ( identifier[z] )] keyword[for] identifier[_y] keyword[in] identifier[range] ( identifier[y] )] keyword[for] identifier[_x] keyword[in] identifier[range] ( identifier[x] )]
|
def genCubeVector(x, y, z, x_mult=1, y_mult=1, z_mult=1):
"""Generates a map of vector lengths from the center point to each coordinate
x - width of matrix to generate
y - height of matrix to generate
z - depth of matrix to generate
x_mult - value to scale x-axis by
y_mult - value to scale y-axis by
z_mult - value to scale z-axis by
"""
cX = (x - 1) / 2.0
cY = (y - 1) / 2.0
cZ = (z - 1) / 2.0
def vect(_x, _y, _z):
return int(math.sqrt(math.pow(_x - cX, 2 * x_mult) + math.pow(_y - cY, 2 * y_mult) + math.pow(_z - cZ, 2 * z_mult)))
return [[[vect(_x, _y, _z) for _z in range(z)] for _y in range(y)] for _x in range(x)]
|
def parse_order(text):
    """
    :param text: order=id.desc, xxx.asc
    :return: [
        [<column>, asc|desc|default],
        [<column2>, asc|desc|default],
    ]
    """
    result = []
    for piece in text.split(','):
        piece = piece.strip()
        parts = piece.split('.', 2)
        if len(parts) == 1:
            # Bare column name: no explicit ordering requested.
            column = parts[0]
            mode = 'default'
        elif len(parts) == 2:
            column, mode = parts
        else:
            raise InvalidParams("Invalid order syntax")
        mode = mode.lower()
        if mode not in ('asc', 'desc', 'default'):
            raise InvalidParams('Invalid order mode: %s' % mode)
        # 'default' entries are dropped rather than emitted.
        if mode != 'default':
            result.append(SQLQueryOrder(column, mode))
    return result
|
def function[parse_order, parameter[text]]:
constant[
:param text: order=id.desc, xxx.asc
:return: [
[<column>, asc|desc|default],
[<column2>, asc|desc|default],
]
]
variable[orders] assign[=] list[[]]
for taget[name[i]] in starred[call[name[map], parameter[name[str].strip, call[name[text].split, parameter[constant[,]]]]]] begin[:]
variable[items] assign[=] call[name[i].split, parameter[constant[.], constant[2]]]
if compare[call[name[len], parameter[name[items]]] equal[==] constant[1]] begin[:]
<ast.Tuple object at 0x7da1b0065bd0> assign[=] tuple[[<ast.Subscript object at 0x7da1b0067400>, <ast.Constant object at 0x7da1b0067340>]]
variable[order] assign[=] call[name[order].lower, parameter[]]
if compare[name[order] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b0066650>, <ast.Constant object at 0x7da1b0064f10>, <ast.Constant object at 0x7da1b0067430>]]] begin[:]
<ast.Raise object at 0x7da1b00653f0>
if compare[name[order] not_equal[!=] constant[default]] begin[:]
call[name[orders].append, parameter[call[name[SQLQueryOrder], parameter[name[column], name[order]]]]]
return[name[orders]]
|
keyword[def] identifier[parse_order] ( identifier[text] ):
literal[string]
identifier[orders] =[]
keyword[for] identifier[i] keyword[in] identifier[map] ( identifier[str] . identifier[strip] , identifier[text] . identifier[split] ( literal[string] )):
identifier[items] = identifier[i] . identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[len] ( identifier[items] )== literal[int] : identifier[column] , identifier[order] = identifier[items] [ literal[int] ], literal[string]
keyword[elif] identifier[len] ( identifier[items] )== literal[int] : identifier[column] , identifier[order] = identifier[items]
keyword[else] : keyword[raise] identifier[InvalidParams] ( literal[string] )
identifier[order] = identifier[order] . identifier[lower] ()
keyword[if] identifier[order] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] ):
keyword[raise] identifier[InvalidParams] ( literal[string] % identifier[order] )
keyword[if] identifier[order] != literal[string] :
identifier[orders] . identifier[append] ( identifier[SQLQueryOrder] ( identifier[column] , identifier[order] ))
keyword[return] identifier[orders]
|
def parse_order(text):
"""
:param text: order=id.desc, xxx.asc
:return: [
[<column>, asc|desc|default],
[<column2>, asc|desc|default],
]
"""
orders = []
for i in map(str.strip, text.split(',')):
items = i.split('.', 2)
if len(items) == 1:
(column, order) = (items[0], 'default') # depends on [control=['if'], data=[]]
elif len(items) == 2:
(column, order) = items # depends on [control=['if'], data=[]]
else:
raise InvalidParams('Invalid order syntax')
order = order.lower()
if order not in ('asc', 'desc', 'default'):
raise InvalidParams('Invalid order mode: %s' % order) # depends on [control=['if'], data=['order']]
if order != 'default':
orders.append(SQLQueryOrder(column, order)) # depends on [control=['if'], data=['order']] # depends on [control=['for'], data=['i']]
return orders
|
def list(self, *kinds, **kwargs):
    """Returns a list of inputs that are in the :class:`Inputs` collection.
    You can also filter by one or more input kinds.
    This function iterates over all possible inputs, regardless of any arguments you
    specify. Because the :class:`Inputs` collection is the union of all the inputs of each
    kind, this method implements parameters such as "count", "search", and so
    on at the Python level once all the data has been fetched. The exception
    is when you specify a single input kind, and then this method makes a single request
    with the usual semantics for parameters.
    :param kinds: The input kinds to return (optional).
        - "ad": Active Directory
        - "monitor": Files and directories
        - "registry": Windows Registry
        - "script": Scripts
        - "splunktcp": TCP, processed
        - "tcp": TCP, unprocessed
        - "udp": UDP
        - "win-event-log-collections": Windows event log
        - "win-perfmon": Performance monitoring
        - "win-wmi-collections": WMI
    :type kinds: ``string``
    :param kwargs: Additional arguments (optional):
        - "count" (``integer``): The maximum number of items to return.
        - "offset" (``integer``): The offset of the first item to return.
        - "search" (``string``): The search query to filter responses.
        - "sort_dir" (``string``): The direction to sort returned items:
          "asc" or "desc".
        - "sort_field" (``string``): The field to use for sorting (optional).
        - "sort_mode" (``string``): The collating sequence for sorting
          returned items: "auto", "alpha", "alpha_case", or "num".
    :type kwargs: ``dict``
    :return: A list of input kinds.
    :rtype: ``list``
    """
    if len(kinds) == 0:
        kinds = self.kinds
    if len(kinds) == 1:
        # Single kind: one request with the server handling count/offset/etc.
        kind = kinds[0]
        logging.debug("Inputs.list taking short circuit branch for single kind.")
        path = self.kindpath(kind)
        logging.debug("Path for inputs: %s", path)
        try:
            path = UrlEncoded(path, skip_encode=True)
            response = self.get(path, **kwargs)
        except HTTPError as he:
            if he.status == 404:  # No inputs of this kind
                return []
            else:
                # BUGFIX: previously a non-404 HTTPError was swallowed here,
                # which left ``response`` unbound and raised a confusing
                # NameError below. Re-raise, matching the multi-kind branch.
                raise
        entities = []
        entries = _load_atom_entries(response)
        if entries is None:
            return []  # No inputs in a collection comes back with no feed or entry in the XML
        for entry in entries:
            state = _parse_atom_entry(entry)
            # Unquote the URL, since all URL encoded in the SDK
            # should be of type UrlEncoded, and all str should not
            # be URL encoded.
            path = urllib.parse.unquote(state.links.alternate)
            entity = Input(self.service, path, kind, state=state)
            entities.append(entity)
        return entities

    # Multiple kinds: fetch everything, then apply offset/count/sort locally.
    search = kwargs.get('search', '*')
    entities = []
    for kind in kinds:
        response = None
        try:
            kind = UrlEncoded(kind, skip_encode=True)
            response = self.get(self.kindpath(kind), search=search)
        except HTTPError as e:
            if e.status == 404:
                continue  # No inputs of this kind
            else:
                raise
        entries = _load_atom_entries(response)
        if entries is None:
            continue  # No inputs to process
        for entry in entries:
            state = _parse_atom_entry(entry)
            # Unquote the URL, since all URL encoded in the SDK
            # should be of type UrlEncoded, and all str should not
            # be URL encoded.
            path = urllib.parse.unquote(state.links.alternate)
            entity = Input(self.service, path, kind, state=state)
            entities.append(entity)
    if 'offset' in kwargs:
        entities = entities[kwargs['offset']:]
    if 'count' in kwargs:
        entities = entities[:kwargs['count']]
    if kwargs.get('sort_mode', None) == 'alpha':
        # Case-insensitive sort on the requested field (default: name).
        sort_field = kwargs.get('sort_field', 'name')
        if sort_field == 'name':
            f = lambda x: x.name.lower()
        else:
            f = lambda x: x[sort_field].lower()
        entities = sorted(entities, key=f)
    if kwargs.get('sort_mode', None) == 'alpha_case':
        # Case-sensitive sort on the requested field (default: name).
        sort_field = kwargs.get('sort_field', 'name')
        if sort_field == 'name':
            f = lambda x: x.name
        else:
            f = lambda x: x[sort_field]
        entities = sorted(entities, key=f)
    if kwargs.get('sort_dir', 'asc') == 'desc':
        entities = list(reversed(entities))
    return entities
|
def function[list, parameter[self]]:
constant[Returns a list of inputs that are in the :class:`Inputs` collection.
You can also filter by one or more input kinds.
This function iterates over all possible inputs, regardless of any arguments you
specify. Because the :class:`Inputs` collection is the union of all the inputs of each
kind, this method implements parameters such as "count", "search", and so
on at the Python level once all the data has been fetched. The exception
is when you specify a single input kind, and then this method makes a single request
with the usual semantics for parameters.
:param kinds: The input kinds to return (optional).
- "ad": Active Directory
- "monitor": Files and directories
- "registry": Windows Registry
- "script": Scripts
- "splunktcp": TCP, processed
- "tcp": TCP, unprocessed
- "udp": UDP
- "win-event-log-collections": Windows event log
- "win-perfmon": Performance monitoring
- "win-wmi-collections": WMI
:type kinds: ``string``
:param kwargs: Additional arguments (optional):
- "count" (``integer``): The maximum number of items to return.
- "offset" (``integer``): The offset of the first item to return.
- "search" (``string``): The search query to filter responses.
- "sort_dir" (``string``): The direction to sort returned items:
"asc" or "desc".
- "sort_key" (``string``): The field to use for sorting (optional).
- "sort_mode" (``string``): The collating sequence for sorting
returned items: "auto", "alpha", "alpha_case", or "num".
:type kwargs: ``dict``
:return: A list of input kinds.
:rtype: ``list``
]
if compare[call[name[len], parameter[name[kinds]]] equal[==] constant[0]] begin[:]
variable[kinds] assign[=] name[self].kinds
if compare[call[name[len], parameter[name[kinds]]] equal[==] constant[1]] begin[:]
variable[kind] assign[=] call[name[kinds]][constant[0]]
call[name[logging].debug, parameter[constant[Inputs.list taking short circuit branch for single kind.]]]
variable[path] assign[=] call[name[self].kindpath, parameter[name[kind]]]
call[name[logging].debug, parameter[constant[Path for inputs: %s], name[path]]]
<ast.Try object at 0x7da1b1983e80>
variable[entities] assign[=] list[[]]
variable[entries] assign[=] call[name[_load_atom_entries], parameter[name[response]]]
if compare[name[entries] is constant[None]] begin[:]
return[list[[]]]
for taget[name[entry]] in starred[name[entries]] begin[:]
variable[state] assign[=] call[name[_parse_atom_entry], parameter[name[entry]]]
variable[path] assign[=] call[name[urllib].parse.unquote, parameter[name[state].links.alternate]]
variable[entity] assign[=] call[name[Input], parameter[name[self].service, name[path], name[kind]]]
call[name[entities].append, parameter[name[entity]]]
return[name[entities]]
variable[search] assign[=] call[name[kwargs].get, parameter[constant[search], constant[*]]]
variable[entities] assign[=] list[[]]
for taget[name[kind]] in starred[name[kinds]] begin[:]
variable[response] assign[=] constant[None]
<ast.Try object at 0x7da1b1980a90>
variable[entries] assign[=] call[name[_load_atom_entries], parameter[name[response]]]
if compare[name[entries] is constant[None]] begin[:]
continue
for taget[name[entry]] in starred[name[entries]] begin[:]
variable[state] assign[=] call[name[_parse_atom_entry], parameter[name[entry]]]
variable[path] assign[=] call[name[urllib].parse.unquote, parameter[name[state].links.alternate]]
variable[entity] assign[=] call[name[Input], parameter[name[self].service, name[path], name[kind]]]
call[name[entities].append, parameter[name[entity]]]
if compare[constant[offset] in name[kwargs]] begin[:]
variable[entities] assign[=] call[name[entities]][<ast.Slice object at 0x7da1b1980790>]
if compare[constant[count] in name[kwargs]] begin[:]
variable[entities] assign[=] call[name[entities]][<ast.Slice object at 0x7da1b1982da0>]
if compare[call[name[kwargs].get, parameter[constant[sort_mode], constant[None]]] equal[==] constant[alpha]] begin[:]
variable[sort_field] assign[=] call[name[kwargs].get, parameter[constant[sort_field], constant[name]]]
if compare[name[sort_field] equal[==] constant[name]] begin[:]
variable[f] assign[=] <ast.Lambda object at 0x7da1b194faf0>
variable[entities] assign[=] call[name[sorted], parameter[name[entities]]]
if compare[call[name[kwargs].get, parameter[constant[sort_mode], constant[None]]] equal[==] constant[alpha_case]] begin[:]
variable[sort_field] assign[=] call[name[kwargs].get, parameter[constant[sort_field], constant[name]]]
if compare[name[sort_field] equal[==] constant[name]] begin[:]
variable[f] assign[=] <ast.Lambda object at 0x7da1b194c1f0>
variable[entities] assign[=] call[name[sorted], parameter[name[entities]]]
if compare[call[name[kwargs].get, parameter[constant[sort_dir], constant[asc]]] equal[==] constant[desc]] begin[:]
variable[entities] assign[=] call[name[list], parameter[call[name[reversed], parameter[name[entities]]]]]
return[name[entities]]
|
keyword[def] identifier[list] ( identifier[self] ,* identifier[kinds] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[len] ( identifier[kinds] )== literal[int] :
identifier[kinds] = identifier[self] . identifier[kinds]
keyword[if] identifier[len] ( identifier[kinds] )== literal[int] :
identifier[kind] = identifier[kinds] [ literal[int] ]
identifier[logging] . identifier[debug] ( literal[string] )
identifier[path] = identifier[self] . identifier[kindpath] ( identifier[kind] )
identifier[logging] . identifier[debug] ( literal[string] , identifier[path] )
keyword[try] :
identifier[path] = identifier[UrlEncoded] ( identifier[path] , identifier[skip_encode] = keyword[True] )
identifier[response] = identifier[self] . identifier[get] ( identifier[path] ,** identifier[kwargs] )
keyword[except] identifier[HTTPError] keyword[as] identifier[he] :
keyword[if] identifier[he] . identifier[status] == literal[int] :
keyword[return] []
identifier[entities] =[]
identifier[entries] = identifier[_load_atom_entries] ( identifier[response] )
keyword[if] identifier[entries] keyword[is] keyword[None] :
keyword[return] []
keyword[for] identifier[entry] keyword[in] identifier[entries] :
identifier[state] = identifier[_parse_atom_entry] ( identifier[entry] )
identifier[path] = identifier[urllib] . identifier[parse] . identifier[unquote] ( identifier[state] . identifier[links] . identifier[alternate] )
identifier[entity] = identifier[Input] ( identifier[self] . identifier[service] , identifier[path] , identifier[kind] , identifier[state] = identifier[state] )
identifier[entities] . identifier[append] ( identifier[entity] )
keyword[return] identifier[entities]
identifier[search] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[entities] =[]
keyword[for] identifier[kind] keyword[in] identifier[kinds] :
identifier[response] = keyword[None]
keyword[try] :
identifier[kind] = identifier[UrlEncoded] ( identifier[kind] , identifier[skip_encode] = keyword[True] )
identifier[response] = identifier[self] . identifier[get] ( identifier[self] . identifier[kindpath] ( identifier[kind] ), identifier[search] = identifier[search] )
keyword[except] identifier[HTTPError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[status] == literal[int] :
keyword[continue]
keyword[else] :
keyword[raise]
identifier[entries] = identifier[_load_atom_entries] ( identifier[response] )
keyword[if] identifier[entries] keyword[is] keyword[None] : keyword[continue]
keyword[for] identifier[entry] keyword[in] identifier[entries] :
identifier[state] = identifier[_parse_atom_entry] ( identifier[entry] )
identifier[path] = identifier[urllib] . identifier[parse] . identifier[unquote] ( identifier[state] . identifier[links] . identifier[alternate] )
identifier[entity] = identifier[Input] ( identifier[self] . identifier[service] , identifier[path] , identifier[kind] , identifier[state] = identifier[state] )
identifier[entities] . identifier[append] ( identifier[entity] )
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[entities] = identifier[entities] [ identifier[kwargs] [ literal[string] ]:]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[entities] = identifier[entities] [: identifier[kwargs] [ literal[string] ]]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )== literal[string] :
identifier[sort_field] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[sort_field] == literal[string] :
identifier[f] = keyword[lambda] identifier[x] : identifier[x] . identifier[name] . identifier[lower] ()
keyword[else] :
identifier[f] = keyword[lambda] identifier[x] : identifier[x] [ identifier[sort_field] ]. identifier[lower] ()
identifier[entities] = identifier[sorted] ( identifier[entities] , identifier[key] = identifier[f] )
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )== literal[string] :
identifier[sort_field] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[sort_field] == literal[string] :
identifier[f] = keyword[lambda] identifier[x] : identifier[x] . identifier[name]
keyword[else] :
identifier[f] = keyword[lambda] identifier[x] : identifier[x] [ identifier[sort_field] ]
identifier[entities] = identifier[sorted] ( identifier[entities] , identifier[key] = identifier[f] )
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )== literal[string] :
identifier[entities] = identifier[list] ( identifier[reversed] ( identifier[entities] ))
keyword[return] identifier[entities]
|
def list(self, *kinds, **kwargs):
"""Returns a list of inputs that are in the :class:`Inputs` collection.
You can also filter by one or more input kinds.
This function iterates over all possible inputs, regardless of any arguments you
specify. Because the :class:`Inputs` collection is the union of all the inputs of each
kind, this method implements parameters such as "count", "search", and so
on at the Python level once all the data has been fetched. The exception
is when you specify a single input kind, and then this method makes a single request
with the usual semantics for parameters.
:param kinds: The input kinds to return (optional).
- "ad": Active Directory
- "monitor": Files and directories
- "registry": Windows Registry
- "script": Scripts
- "splunktcp": TCP, processed
- "tcp": TCP, unprocessed
- "udp": UDP
- "win-event-log-collections": Windows event log
- "win-perfmon": Performance monitoring
- "win-wmi-collections": WMI
:type kinds: ``string``
:param kwargs: Additional arguments (optional):
- "count" (``integer``): The maximum number of items to return.
- "offset" (``integer``): The offset of the first item to return.
- "search" (``string``): The search query to filter responses.
- "sort_dir" (``string``): The direction to sort returned items:
"asc" or "desc".
- "sort_key" (``string``): The field to use for sorting (optional).
- "sort_mode" (``string``): The collating sequence for sorting
returned items: "auto", "alpha", "alpha_case", or "num".
:type kwargs: ``dict``
:return: A list of input kinds.
:rtype: ``list``
"""
if len(kinds) == 0:
kinds = self.kinds # depends on [control=['if'], data=[]]
if len(kinds) == 1:
kind = kinds[0]
logging.debug('Inputs.list taking short circuit branch for single kind.')
path = self.kindpath(kind)
logging.debug('Path for inputs: %s', path)
try:
path = UrlEncoded(path, skip_encode=True)
response = self.get(path, **kwargs) # depends on [control=['try'], data=[]]
except HTTPError as he:
if he.status == 404: # No inputs of this kind
return [] # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['he']]
entities = []
entries = _load_atom_entries(response)
if entries is None:
return [] # No inputs in a collection comes back with no feed or entry in the XML # depends on [control=['if'], data=[]]
for entry in entries:
state = _parse_atom_entry(entry)
# Unquote the URL, since all URL encoded in the SDK
# should be of type UrlEncoded, and all str should not
# be URL encoded.
path = urllib.parse.unquote(state.links.alternate)
entity = Input(self.service, path, kind, state=state)
entities.append(entity) # depends on [control=['for'], data=['entry']]
return entities # depends on [control=['if'], data=[]]
search = kwargs.get('search', '*')
entities = []
for kind in kinds:
response = None
try:
kind = UrlEncoded(kind, skip_encode=True)
response = self.get(self.kindpath(kind), search=search) # depends on [control=['try'], data=[]]
except HTTPError as e:
if e.status == 404:
continue # No inputs of this kind # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['e']]
entries = _load_atom_entries(response)
if entries is None:
continue # No inputs to process # depends on [control=['if'], data=[]]
for entry in entries:
state = _parse_atom_entry(entry)
# Unquote the URL, since all URL encoded in the SDK
# should be of type UrlEncoded, and all str should not
# be URL encoded.
path = urllib.parse.unquote(state.links.alternate)
entity = Input(self.service, path, kind, state=state)
entities.append(entity) # depends on [control=['for'], data=['entry']] # depends on [control=['for'], data=['kind']]
if 'offset' in kwargs:
entities = entities[kwargs['offset']:] # depends on [control=['if'], data=['kwargs']]
if 'count' in kwargs:
entities = entities[:kwargs['count']] # depends on [control=['if'], data=['kwargs']]
if kwargs.get('sort_mode', None) == 'alpha':
sort_field = kwargs.get('sort_field', 'name')
if sort_field == 'name':
f = lambda x: x.name.lower() # depends on [control=['if'], data=[]]
else:
f = lambda x: x[sort_field].lower()
entities = sorted(entities, key=f) # depends on [control=['if'], data=[]]
if kwargs.get('sort_mode', None) == 'alpha_case':
sort_field = kwargs.get('sort_field', 'name')
if sort_field == 'name':
f = lambda x: x.name # depends on [control=['if'], data=[]]
else:
f = lambda x: x[sort_field]
entities = sorted(entities, key=f) # depends on [control=['if'], data=[]]
if kwargs.get('sort_dir', 'asc') == 'desc':
entities = list(reversed(entities)) # depends on [control=['if'], data=[]]
return entities
|
def server_identity_is_verified(self):
    """ GPGAuth stage0 """
    # Encrypt our nonce to the server's key; the server must prove it can
    # decrypt it and echo it back unchanged.
    encrypted_nonce = self.gpg.encrypt(
        self._nonce0, self.server_fingerprint, always_trust=True)
    if not encrypted_nonce.ok:
        raise GPGAuthStage0Exception(
            'Encryption of the nonce0 (%s) '
            'to the server fingerprint (%s) failed.' %
            (self._nonce0, self.server_fingerprint)
        )

    verify_response = post_server_verify_token(
        self,
        keyid=self.user_fingerprint,
        server_verify_token=str(encrypted_nonce)
    )
    if not check_server_verify_response(verify_response):
        raise GPGAuthStage0Exception("Verify endpoint wrongly formatted")

    echoed = verify_response.headers.get('X-GPGAuth-Verify-Response')
    if echoed != self._nonce0:
        raise GPGAuthStage0Exception(
            'The server decrypted something different than what we sent '
            '(%s <> %s)' %
            (echoed, self._nonce0))
    logger.info('server_identity_is_verified: OK')
    return True
|
def function[server_identity_is_verified, parameter[self]]:
constant[ GPGAuth stage0 ]
variable[server_verify_token] assign[=] call[name[self].gpg.encrypt, parameter[name[self]._nonce0, name[self].server_fingerprint]]
if <ast.UnaryOp object at 0x7da18ede4fd0> begin[:]
<ast.Raise object at 0x7da18ede57e0>
variable[server_verify_response] assign[=] call[name[post_server_verify_token], parameter[name[self]]]
if <ast.UnaryOp object at 0x7da18ede7f10> begin[:]
<ast.Raise object at 0x7da18ede7310>
if compare[call[name[server_verify_response].headers.get, parameter[constant[X-GPGAuth-Verify-Response]]] not_equal[!=] name[self]._nonce0] begin[:]
<ast.Raise object at 0x7da18ede6500>
call[name[logger].info, parameter[constant[server_identity_is_verified: OK]]]
return[constant[True]]
|
keyword[def] identifier[server_identity_is_verified] ( identifier[self] ):
literal[string]
identifier[server_verify_token] = identifier[self] . identifier[gpg] . identifier[encrypt] ( identifier[self] . identifier[_nonce0] ,
identifier[self] . identifier[server_fingerprint] , identifier[always_trust] = keyword[True] )
keyword[if] keyword[not] identifier[server_verify_token] . identifier[ok] :
keyword[raise] identifier[GPGAuthStage0Exception] (
literal[string]
literal[string] %
( identifier[self] . identifier[_nonce0] , identifier[self] . identifier[server_fingerprint] )
)
identifier[server_verify_response] = identifier[post_server_verify_token] (
identifier[self] ,
identifier[keyid] = identifier[self] . identifier[user_fingerprint] ,
identifier[server_verify_token] = identifier[str] ( identifier[server_verify_token] )
)
keyword[if] keyword[not] identifier[check_server_verify_response] ( identifier[server_verify_response] ):
keyword[raise] identifier[GPGAuthStage0Exception] ( literal[string] )
keyword[if] identifier[server_verify_response] . identifier[headers] . identifier[get] ( literal[string] )!= identifier[self] . identifier[_nonce0] :
keyword[raise] identifier[GPGAuthStage0Exception] (
literal[string]
literal[string] %
( identifier[server_verify_response] . identifier[headers] . identifier[get] ( literal[string] ), identifier[self] . identifier[_nonce0] ))
identifier[logger] . identifier[info] ( literal[string] )
keyword[return] keyword[True]
|
def server_identity_is_verified(self):
    """GPGAuth stage0: prove the server really holds its advertised key.

    The client encrypts its nonce0 to the server's fingerprint and POSTs
    it to the verify endpoint; a genuine server can decrypt the token and
    must echo the nonce back in the X-GPGAuth-Verify-Response header.

    Returns True on success; raises GPGAuthStage0Exception otherwise.
    """
    # Encrypt a uuid token for the server
    encrypted_token = self.gpg.encrypt(
        self._nonce0, self.server_fingerprint, always_trust=True)
    if not encrypted_token.ok:
        raise GPGAuthStage0Exception(
            'Encryption of the nonce0 (%s) to '
            'the server fingerprint (%s) failed.' %
            (self._nonce0, self.server_fingerprint))
    response = post_server_verify_token(
        self,
        keyid=self.user_fingerprint,
        server_verify_token=str(encrypted_token),
    )
    if not check_server_verify_response(response):
        raise GPGAuthStage0Exception('Verify endpoint wrongly formatted')
    # The decrypted nonce the server sends back must match what we sent.
    echoed_nonce = response.headers.get('X-GPGAuth-Verify-Response')
    if echoed_nonce != self._nonce0:
        raise GPGAuthStage0Exception(
            'The server decrypted something different '
            'than what we sent (%s <> %s)' %
            (echoed_nonce, self._nonce0))
    logger.info('server_identity_is_verified: OK')
    return True
|
def get_chunks(Array, Chunksize):
    """Generator yielding consecutive slices of *Array*.

    Each chunk is ``Array[i:i + Chunksize]``; the final chunk may be
    shorter when ``len(Array)`` is not a multiple of ``Chunksize``.
    ``Chunksize`` must be a nonzero int (``range`` rejects a zero step).
    """
    offsets = range(0, len(Array), Chunksize)
    for offset in offsets:
        yield Array[offset:offset + Chunksize]
|
def function[get_chunks, parameter[Array, Chunksize]]:
constant[Generator that yields chunks of size ChunkSize]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[Array]]], name[Chunksize]]]] begin[:]
<ast.Yield object at 0x7da20e963b50>
|
keyword[def] identifier[get_chunks] ( identifier[Array] , identifier[Chunksize] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[Array] ), identifier[Chunksize] ):
keyword[yield] identifier[Array] [ identifier[i] : identifier[i] + identifier[Chunksize] ]
|
def get_chunks(Array, Chunksize):
"""Generator that yields chunks of size ChunkSize"""
for i in range(0, len(Array), Chunksize):
yield Array[i:i + Chunksize] # depends on [control=['for'], data=['i']]
|
def load_outputs(self):
    """Load output module(s)"""
    # Instantiate one OutputModule per registered output format, keyed
    # by the format name.
    for fmt in sorted(logdissect.output.__formats__):
        module = __import__(
            'logdissect.output.' + fmt, globals(), locals(), [logdissect])
        self.output_modules[fmt] = module.OutputModule(args=self.output_args)
|
def function[load_outputs, parameter[self]]:
constant[Load output module(s)]
for taget[name[output]] in starred[call[name[sorted], parameter[name[logdissect].output.__formats__]]] begin[:]
call[name[self].output_modules][name[output]] assign[=] call[call[name[__import__], parameter[binary_operation[constant[logdissect.output.] + name[output]], call[name[globals], parameter[]], call[name[locals], parameter[]], list[[<ast.Name object at 0x7da204620310>]]]].OutputModule, parameter[]]
|
keyword[def] identifier[load_outputs] ( identifier[self] ):
literal[string]
keyword[for] identifier[output] keyword[in] identifier[sorted] ( identifier[logdissect] . identifier[output] . identifier[__formats__] ):
identifier[self] . identifier[output_modules] [ identifier[output] ]= identifier[__import__] ( literal[string] + identifier[output] , identifier[globals] (), identifier[locals] (),[ identifier[logdissect] ]). identifier[OutputModule] ( identifier[args] = identifier[self] . identifier[output_args] )
|
def load_outputs(self):
"""Load output module(s)"""
for output in sorted(logdissect.output.__formats__):
self.output_modules[output] = __import__('logdissect.output.' + output, globals(), locals(), [logdissect]).OutputModule(args=self.output_args) # depends on [control=['for'], data=['output']]
|
def create(self, friendly_name, aws_credentials_sid=values.unset,
           encryption_key_sid=values.unset, aws_s3_url=values.unset,
           aws_storage_enabled=values.unset, encryption_enabled=values.unset):
    """
    Create a new RecordingSettingsInstance
    :param unicode friendly_name: Friendly name of the configuration to be shown in the console
    :param unicode aws_credentials_sid: SID of the Stored Credential resource CRxx
    :param unicode encryption_key_sid: SID of the Public Key resource CRxx
    :param unicode aws_s3_url: Identity of the external location where the recordings should be stored. We only support DNS-compliant URLs like http://<my-bucket>.s3-<aws-region>.amazonaws.com/recordings, where recordings is the path where you want recordings to be stored.
    :param bool aws_storage_enabled: true|false When set to true, all Recordings will be written to the AwsS3Url specified above. When set to false, all Recordings will be stored in Twilio's cloud.
    :param bool encryption_enabled: true|false When set to true, all Recordings will be stored encrypted.
    :returns: Newly created RecordingSettingsInstance
    :rtype: twilio.rest.video.v1.recording_settings.RecordingSettingsInstance
    """
    # values.of() drops any parameter left at the values.unset sentinel.
    form_params = {
        'FriendlyName': friendly_name,
        'AwsCredentialsSid': aws_credentials_sid,
        'EncryptionKeySid': encryption_key_sid,
        'AwsS3Url': aws_s3_url,
        'AwsStorageEnabled': aws_storage_enabled,
        'EncryptionEnabled': encryption_enabled,
    }
    payload = self._version.create('POST', self._uri, data=values.of(form_params))
    return RecordingSettingsInstance(self._version, payload, )
|
def function[create, parameter[self, friendly_name, aws_credentials_sid, encryption_key_sid, aws_s3_url, aws_storage_enabled, encryption_enabled]]:
constant[
Create a new RecordingSettingsInstance
:param unicode friendly_name: Friendly name of the configuration to be shown in the console
:param unicode aws_credentials_sid: SID of the Stored Credential resource CRxx
:param unicode encryption_key_sid: SID of the Public Key resource CRxx
:param unicode aws_s3_url: Identity of the external location where the recordings should be stored. We only support DNS-compliant URLs like http://<my-bucket>.s3-<aws-region>.amazonaws.com/recordings, where recordings is the path where you want recordings to be stored.
:param bool aws_storage_enabled: true|false When set to true, all Recordings will be written to the AwsS3Url specified above. When set to false, all Recordings will be stored in Twilio's cloud.
:param bool encryption_enabled: true|false When set to true, all Recordings will be stored encrypted.
:returns: Newly created RecordingSettingsInstance
:rtype: twilio.rest.video.v1.recording_settings.RecordingSettingsInstance
]
variable[data] assign[=] call[name[values].of, parameter[dictionary[[<ast.Constant object at 0x7da2054a5780>, <ast.Constant object at 0x7da2054a6b60>, <ast.Constant object at 0x7da2054a5840>, <ast.Constant object at 0x7da2054a63b0>, <ast.Constant object at 0x7da2054a47c0>, <ast.Constant object at 0x7da2054a60e0>], [<ast.Name object at 0x7da2054a6cb0>, <ast.Name object at 0x7da2054a52d0>, <ast.Name object at 0x7da2054a5330>, <ast.Name object at 0x7da2054a6830>, <ast.Name object at 0x7da2054a70d0>, <ast.Name object at 0x7da2054a4850>]]]]
variable[payload] assign[=] call[name[self]._version.create, parameter[constant[POST], name[self]._uri]]
return[call[name[RecordingSettingsInstance], parameter[name[self]._version, name[payload]]]]
|
keyword[def] identifier[create] ( identifier[self] , identifier[friendly_name] , identifier[aws_credentials_sid] = identifier[values] . identifier[unset] ,
identifier[encryption_key_sid] = identifier[values] . identifier[unset] , identifier[aws_s3_url] = identifier[values] . identifier[unset] ,
identifier[aws_storage_enabled] = identifier[values] . identifier[unset] , identifier[encryption_enabled] = identifier[values] . identifier[unset] ):
literal[string]
identifier[data] = identifier[values] . identifier[of] ({
literal[string] : identifier[friendly_name] ,
literal[string] : identifier[aws_credentials_sid] ,
literal[string] : identifier[encryption_key_sid] ,
literal[string] : identifier[aws_s3_url] ,
literal[string] : identifier[aws_storage_enabled] ,
literal[string] : identifier[encryption_enabled] ,
})
identifier[payload] = identifier[self] . identifier[_version] . identifier[create] (
literal[string] ,
identifier[self] . identifier[_uri] ,
identifier[data] = identifier[data] ,
)
keyword[return] identifier[RecordingSettingsInstance] ( identifier[self] . identifier[_version] , identifier[payload] ,)
|
def create(self, friendly_name, aws_credentials_sid=values.unset, encryption_key_sid=values.unset, aws_s3_url=values.unset, aws_storage_enabled=values.unset, encryption_enabled=values.unset):
"""
Create a new RecordingSettingsInstance
:param unicode friendly_name: Friendly name of the configuration to be shown in the console
:param unicode aws_credentials_sid: SID of the Stored Credential resource CRxx
:param unicode encryption_key_sid: SID of the Public Key resource CRxx
:param unicode aws_s3_url: Identity of the external location where the recordings should be stored. We only support DNS-compliant URLs like http://<my-bucket>.s3-<aws-region>.amazonaws.com/recordings, where recordings is the path where you want recordings to be stored.
:param bool aws_storage_enabled: true|false When set to true, all Recordings will be written to the AwsS3Url specified above. When set to false, all Recordings will be stored in Twilio's cloud.
:param bool encryption_enabled: true|false When set to true, all Recordings will be stored encrypted.
:returns: Newly created RecordingSettingsInstance
:rtype: twilio.rest.video.v1.recording_settings.RecordingSettingsInstance
"""
data = values.of({'FriendlyName': friendly_name, 'AwsCredentialsSid': aws_credentials_sid, 'EncryptionKeySid': encryption_key_sid, 'AwsS3Url': aws_s3_url, 'AwsStorageEnabled': aws_storage_enabled, 'EncryptionEnabled': encryption_enabled})
payload = self._version.create('POST', self._uri, data=data)
return RecordingSettingsInstance(self._version, payload)
|
def renegotiate_keys(self):
    """
    Force this session to switch to new keys. Normally this is done
    automatically after the session hits a certain number of packets or
    bytes sent or received, but this method gives you the option of forcing
    new keys whenever you want. Negotiating new keys causes a pause in
    traffic both ways as the two sides swap keys and do computations. This
    method returns when the session has switched to new keys.
    @raise SSHException: if the key renegotiation failed (which causes the
        session to end)
    """
    self.completion_event = threading.Event()
    self._send_kex_init()
    while True:
        # Poll in short intervals so a dead session is noticed promptly
        # instead of blocking forever on the event.
        self.completion_event.wait(0.1)
        if not self.active:
            # Session died mid-negotiation: surface the transport's own
            # error if it recorded one, otherwise a generic failure.
            e = self.get_exception()
            if e is not None:
                raise e
            raise SSHException('Negotiation failed.')
        # is_set() is the standard spelling; the camelCase isSet() alias
        # is deprecated.
        if self.completion_event.is_set():
            break
    return
|
def function[renegotiate_keys, parameter[self]]:
constant[
Force this session to switch to new keys. Normally this is done
automatically after the session hits a certain number of packets or
bytes sent or received, but this method gives you the option of forcing
new keys whenever you want. Negotiating new keys causes a pause in
traffic both ways as the two sides swap keys and do computations. This
method returns when the session has switched to new keys.
@raise SSHException: if the key renegotiation failed (which causes the
session to end)
]
name[self].completion_event assign[=] call[name[threading].Event, parameter[]]
call[name[self]._send_kex_init, parameter[]]
while constant[True] begin[:]
call[name[self].completion_event.wait, parameter[constant[0.1]]]
if <ast.UnaryOp object at 0x7da1b0f52890> begin[:]
variable[e] assign[=] call[name[self].get_exception, parameter[]]
if compare[name[e] is_not constant[None]] begin[:]
<ast.Raise object at 0x7da20c76e500>
<ast.Raise object at 0x7da1b11be800>
if call[name[self].completion_event.isSet, parameter[]] begin[:]
break
return[None]
|
keyword[def] identifier[renegotiate_keys] ( identifier[self] ):
literal[string]
identifier[self] . identifier[completion_event] = identifier[threading] . identifier[Event] ()
identifier[self] . identifier[_send_kex_init] ()
keyword[while] keyword[True] :
identifier[self] . identifier[completion_event] . identifier[wait] ( literal[int] )
keyword[if] keyword[not] identifier[self] . identifier[active] :
identifier[e] = identifier[self] . identifier[get_exception] ()
keyword[if] identifier[e] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[e]
keyword[raise] identifier[SSHException] ( literal[string] )
keyword[if] identifier[self] . identifier[completion_event] . identifier[isSet] ():
keyword[break]
keyword[return]
|
def renegotiate_keys(self):
"""
Force this session to switch to new keys. Normally this is done
automatically after the session hits a certain number of packets or
bytes sent or received, but this method gives you the option of forcing
new keys whenever you want. Negotiating new keys causes a pause in
traffic both ways as the two sides swap keys and do computations. This
method returns when the session has switched to new keys.
@raise SSHException: if the key renegotiation failed (which causes the
session to end)
"""
self.completion_event = threading.Event()
self._send_kex_init()
while True:
self.completion_event.wait(0.1)
if not self.active:
e = self.get_exception()
if e is not None:
raise e # depends on [control=['if'], data=['e']]
raise SSHException('Negotiation failed.') # depends on [control=['if'], data=[]]
if self.completion_event.isSet():
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
return
|
def get_barcode_umis(read, cell_barcode=False):
    ''' extract the umi +/- cell barcode from the read name where the barcodes
    were extracted using umis

    Parameters
    ----------
    read : object whose ``qname`` attribute is a colon-separated read name
        containing ``UMI_<seq>`` and optionally ``CELL_<seq>`` fields
        (presumably a pysam aligned segment -- TODO confirm)
    cell_barcode : bool
        if True, also extract the cell barcode

    Returns
    -------
    (umi, cell) : tuple of bytes or None
        ``cell`` is None unless ``cell_barcode`` is True and a CELL_ field
        is present

    Raises
    ------
    ValueError
        if no UMI field is present or the read name cannot be parsed
    '''
    umi, cell = None, None
    try:
        read_name_elements = read.qname.split(":")
    except AttributeError:
        # read has no usable string qname; previously masked by a bare
        # except that converted *any* error into this ValueError.
        raise ValueError("Could not extract UMI +/- cell barcode from the "
                         "read tag")
    for element in read_name_elements:
        if element.startswith("UMI_"):
            umi = element[4:].encode('utf-8')
        elif element.startswith("CELL_") and cell_barcode:
            cell = element[5:].encode('utf-8')
    if umi is None:
        # Raise the informative error directly instead of the original
        # raise-then-catch control flow through a bare except.
        raise ValueError("Could not extract UMI +/- cell barcode from the "
                         "read tag")
    return umi, cell
|
def function[get_barcode_umis, parameter[read, cell_barcode]]:
constant[ extract the umi +/- cell barcode from the read name where the barcodes
were extracted using umis]
<ast.Tuple object at 0x7da20c794850> assign[=] tuple[[<ast.Constant object at 0x7da20c796350>, <ast.Constant object at 0x7da20c794d30>]]
<ast.Try object at 0x7da20c6c4a60>
|
keyword[def] identifier[get_barcode_umis] ( identifier[read] , identifier[cell_barcode] = keyword[False] ):
literal[string]
identifier[umi] , identifier[cell] = keyword[None] , keyword[None]
keyword[try] :
identifier[read_name_elements] = identifier[read] . identifier[qname] . identifier[split] ( literal[string] )
keyword[for] identifier[element] keyword[in] identifier[read_name_elements] :
keyword[if] identifier[element] . identifier[startswith] ( literal[string] ):
identifier[umi] = identifier[element] [ literal[int] :]. identifier[encode] ( literal[string] )
keyword[elif] identifier[element] . identifier[startswith] ( literal[string] ) keyword[and] identifier[cell_barcode] :
identifier[cell] = identifier[element] [ literal[int] :]. identifier[encode] ( literal[string] )
keyword[if] identifier[umi] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ()
keyword[return] identifier[umi] , identifier[cell]
keyword[except] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
|
def get_barcode_umis(read, cell_barcode=False):
""" extract the umi +/- cell barcode from the read name where the barcodes
were extracted using umis"""
(umi, cell) = (None, None)
try:
read_name_elements = read.qname.split(':')
for element in read_name_elements:
if element.startswith('UMI_'):
umi = element[4:].encode('utf-8') # depends on [control=['if'], data=[]]
elif element.startswith('CELL_') and cell_barcode:
cell = element[5:].encode('utf-8') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['element']]
if umi is None:
raise ValueError() # depends on [control=['if'], data=[]]
return (umi, cell) # depends on [control=['try'], data=[]]
except:
raise ValueError('Could not extract UMI +/- cell barcode from the read tag') # depends on [control=['except'], data=[]]
|
def makeMarkovApproxToNormal(x_grid, mu, sigma, K=351, bound=3.5):
    '''
    Build a discrete approximation to N(mu, sigma) on the points of x_grid.

    The returned stochastic vector p_vec satisfies
    E[f(x)] ~= numpy.dot(p_vec, f(x_grid)) for a continuous f and
    x ~ N(mu, sigma).

    Parameters
    ----------
    x_grid: numpy.array
        A sorted 1D array of floats representing discrete values that a
        normally distributed RV could take on.
    mu: float
        Mean of the normal distribution to be approximated.
    sigma: float
        Standard deviation of the normal distribution to be approximated.
    K: int
        Number of points in the normal distribution to sample.
    bound: float
        Truncation bound of the normal distribution, as +/- bound*sigma.

    Returns
    -------
    p_vec: numpy.array
        A stochastic vector with probability weights for each x in x_grid.
    '''
    n_points = x_grid.size
    # K evenly spaced standardized draws on [-bound, bound], each weighted
    # by the normal pdf evaluated there.
    std_draws = np.linspace(-bound, bound, K)
    pdf_weights = stats.norm.pdf(std_draws)
    draws = mu + sigma * std_draws
    weights = np.zeros(n_points)

    # Index of the grid point just above each draw, clipped to [1, n-1]
    # so every draw lies between two valid grid points.
    pos = np.searchsorted(x_grid, draws)
    pos[pos < 1] = 1
    pos[pos > n_points - 1] = n_points - 1

    # Linear-interpolation fraction of each draw between its neighbors,
    # kept in [0, 1] so out-of-grid draws attach to the boundary points.
    below = x_grid[pos - 1]
    above = x_grid[pos]
    frac = np.clip((draws - below) / (above - below), 0., 1.)

    # Split each draw's pdf mass between its two neighboring grid points
    # according to the interpolation fraction.
    for idx in range(1, n_points):
        here = pos == idx
        weights[idx - 1] += np.dot(pdf_weights[here], 1.0 - frac[here])
        weights[idx] += np.dot(pdf_weights[here], frac[here])

    # Normalize so the weights form a proper probability vector.
    p_vec = weights / np.sum(weights)
    # Sanity-check before returning.
    assert (np.all(p_vec >= 0.)) and (np.all(p_vec <= 1.)) and (np.isclose(np.sum(p_vec), 1.))
    return p_vec
|
def function[makeMarkovApproxToNormal, parameter[x_grid, mu, sigma, K, bound]]:
constant[
Creates an approximation to a normal distribution with mean mu and standard
deviation sigma, returning a stochastic vector called p_vec, corresponding
to values in x_grid. If a RV is distributed x~N(mu,sigma), then the expectation
of a continuous function f() is E[f(x)] = numpy.dot(p_vec,f(x_grid)).
Parameters
----------
x_grid: numpy.array
A sorted 1D array of floats representing discrete values that a normally
distributed RV could take on.
mu: float
Mean of the normal distribution to be approximated.
sigma: float
Standard deviation of the normal distribution to be approximated.
K: int
Number of points in the normal distribution to sample.
bound: float
Truncation bound of the normal distribution, as +/- bound*sigma.
Returns
-------
p_vec: numpy.array
A stochastic vector with probability weights for each x in x_grid.
]
variable[x_n] assign[=] name[x_grid].size
variable[lower_bound] assign[=] <ast.UnaryOp object at 0x7da1b23475b0>
variable[upper_bound] assign[=] name[bound]
variable[raw_sample] assign[=] call[name[np].linspace, parameter[name[lower_bound], name[upper_bound], name[K]]]
variable[f_weights] assign[=] call[name[stats].norm.pdf, parameter[name[raw_sample]]]
variable[sample] assign[=] binary_operation[name[mu] + binary_operation[name[sigma] * name[raw_sample]]]
variable[w_vec] assign[=] call[name[np].zeros, parameter[name[x_n]]]
variable[sample_pos] assign[=] call[name[np].searchsorted, parameter[name[x_grid], name[sample]]]
call[name[sample_pos]][compare[name[sample_pos] less[<] constant[1]]] assign[=] constant[1]
call[name[sample_pos]][compare[name[sample_pos] greater[>] binary_operation[name[x_n] - constant[1]]]] assign[=] binary_operation[name[x_n] - constant[1]]
variable[bot] assign[=] call[name[x_grid]][binary_operation[name[sample_pos] - constant[1]]]
variable[top] assign[=] call[name[x_grid]][name[sample_pos]]
variable[alpha] assign[=] binary_operation[binary_operation[name[sample] - name[bot]] / binary_operation[name[top] - name[bot]]]
variable[alpha_clipped] assign[=] call[name[np].clip, parameter[name[alpha], constant[0.0], constant[1.0]]]
for taget[name[j]] in starred[call[name[range], parameter[constant[1], name[x_n]]]] begin[:]
variable[c] assign[=] compare[name[sample_pos] equal[==] name[j]]
call[name[w_vec]][binary_operation[name[j] - constant[1]]] assign[=] binary_operation[call[name[w_vec]][binary_operation[name[j] - constant[1]]] + call[name[np].dot, parameter[call[name[f_weights]][name[c]], binary_operation[constant[1.0] - call[name[alpha_clipped]][name[c]]]]]]
call[name[w_vec]][name[j]] assign[=] binary_operation[call[name[w_vec]][name[j]] + call[name[np].dot, parameter[call[name[f_weights]][name[c]], call[name[alpha_clipped]][name[c]]]]]
variable[W] assign[=] call[name[np].sum, parameter[name[w_vec]]]
variable[p_vec] assign[=] binary_operation[name[w_vec] / name[W]]
assert[<ast.BoolOp object at 0x7da20e9b1270>]
return[name[p_vec]]
|
keyword[def] identifier[makeMarkovApproxToNormal] ( identifier[x_grid] , identifier[mu] , identifier[sigma] , identifier[K] = literal[int] , identifier[bound] = literal[int] ):
literal[string]
identifier[x_n] = identifier[x_grid] . identifier[size]
identifier[lower_bound] =- identifier[bound]
identifier[upper_bound] = identifier[bound]
identifier[raw_sample] = identifier[np] . identifier[linspace] ( identifier[lower_bound] , identifier[upper_bound] , identifier[K] )
identifier[f_weights] = identifier[stats] . identifier[norm] . identifier[pdf] ( identifier[raw_sample] )
identifier[sample] = identifier[mu] + identifier[sigma] * identifier[raw_sample]
identifier[w_vec] = identifier[np] . identifier[zeros] ( identifier[x_n] )
identifier[sample_pos] = identifier[np] . identifier[searchsorted] ( identifier[x_grid] , identifier[sample] )
identifier[sample_pos] [ identifier[sample_pos] < literal[int] ]= literal[int]
identifier[sample_pos] [ identifier[sample_pos] > identifier[x_n] - literal[int] ]= identifier[x_n] - literal[int]
identifier[bot] = identifier[x_grid] [ identifier[sample_pos] - literal[int] ]
identifier[top] = identifier[x_grid] [ identifier[sample_pos] ]
identifier[alpha] =( identifier[sample] - identifier[bot] )/( identifier[top] - identifier[bot] )
identifier[alpha_clipped] = identifier[np] . identifier[clip] ( identifier[alpha] , literal[int] , literal[int] )
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[x_n] ):
identifier[c] = identifier[sample_pos] == identifier[j]
identifier[w_vec] [ identifier[j] - literal[int] ]= identifier[w_vec] [ identifier[j] - literal[int] ]+ identifier[np] . identifier[dot] ( identifier[f_weights] [ identifier[c] ], literal[int] - identifier[alpha_clipped] [ identifier[c] ])
identifier[w_vec] [ identifier[j] ]= identifier[w_vec] [ identifier[j] ]+ identifier[np] . identifier[dot] ( identifier[f_weights] [ identifier[c] ], identifier[alpha_clipped] [ identifier[c] ])
identifier[W] = identifier[np] . identifier[sum] ( identifier[w_vec] )
identifier[p_vec] = identifier[w_vec] / identifier[W]
keyword[assert] ( identifier[np] . identifier[all] ( identifier[p_vec] >= literal[int] )) keyword[and] ( identifier[np] . identifier[all] ( identifier[p_vec] <= literal[int] )) keyword[and] ( identifier[np] . identifier[isclose] ( identifier[np] . identifier[sum] ( identifier[p_vec] ), literal[int] ))
keyword[return] identifier[p_vec]
|
def makeMarkovApproxToNormal(x_grid, mu, sigma, K=351, bound=3.5):
"""
Creates an approximation to a normal distribution with mean mu and standard
deviation sigma, returning a stochastic vector called p_vec, corresponding
to values in x_grid. If a RV is distributed x~N(mu,sigma), then the expectation
of a continuous function f() is E[f(x)] = numpy.dot(p_vec,f(x_grid)).
Parameters
----------
x_grid: numpy.array
A sorted 1D array of floats representing discrete values that a normally
distributed RV could take on.
mu: float
Mean of the normal distribution to be approximated.
sigma: float
Standard deviation of the normal distribution to be approximated.
K: int
Number of points in the normal distribution to sample.
bound: float
Truncation bound of the normal distribution, as +/- bound*sigma.
Returns
-------
p_vec: numpy.array
A stochastic vector with probability weights for each x in x_grid.
"""
x_n = x_grid.size # Number of points in the outcome grid
lower_bound = -bound # Lower bound of normal draws to consider, in SD
upper_bound = bound # Upper bound of normal draws to consider, in SD
raw_sample = np.linspace(lower_bound, upper_bound, K) # Evenly spaced draws between bounds
f_weights = stats.norm.pdf(raw_sample) # Relative probability of each draw
sample = mu + sigma * raw_sample # Adjusted bounds, given mean and stdev
w_vec = np.zeros(x_n) # A vector of outcome weights
# Find the relative position of each of the draws
sample_pos = np.searchsorted(x_grid, sample)
sample_pos[sample_pos < 1] = 1
sample_pos[sample_pos > x_n - 1] = x_n - 1
# Make arrays of the x_grid point directly above and below each draw
bot = x_grid[sample_pos - 1]
top = x_grid[sample_pos]
alpha = (sample - bot) / (top - bot)
# Keep the weights (alpha) in bounds
alpha_clipped = np.clip(alpha, 0.0, 1.0)
# Loop through each x_grid point and add up the probability that each nearby
# draw contributes to it (accounting for distance)
for j in range(1, x_n):
c = sample_pos == j
w_vec[j - 1] = w_vec[j - 1] + np.dot(f_weights[c], 1.0 - alpha_clipped[c])
w_vec[j] = w_vec[j] + np.dot(f_weights[c], alpha_clipped[c]) # depends on [control=['for'], data=['j']]
# Reweight the probabilities so they sum to 1
W = np.sum(w_vec)
p_vec = w_vec / W
# Check for obvious errors, and return p_vec
assert np.all(p_vec >= 0.0) and np.all(p_vec <= 1.0) and np.isclose(np.sum(p_vec), 1.0)
return p_vec
|
def integrate(self, mass_min, mass_max, log_mode=True, weight=False, steps=1e4):
    """ Numerical Riemannn integral of the IMF (stupid simple).
    Parameters:
    -----------
    mass_min: minimum mass bound for integration (solar masses)
    mass_max: maximum mass bound for integration (solar masses)
    log_mode[True]: use logarithmic steps in stellar mass as oppose to linear
    weight[False]: weight the integral by stellar mass
    steps: number of numerical integration steps
    Returns:
    --------
    result of integral
    """
    # The historical default is the float 1e4, but np.linspace requires an
    # integer sample count (TypeError on numpy >= 1.18), so coerce here.
    steps = int(steps)
    if log_mode:
        log_min, log_max = np.log10(mass_min), np.log10(mass_max)
        d_mass = (log_max - log_min) / float(steps)  # step in log10(mass)
        mass = 10. ** np.linspace(log_min, log_max, steps)
    else:
        d_mass = (mass_max - mass_min) / float(steps)  # linear mass step
        mass = np.linspace(mass_min, mass_max, steps)
    # Rectangle rule: sum pdf * step, optionally weighted by stellar mass.
    integrand = self.pdf(mass, log_mode=log_mode) * d_mass
    if weight:
        integrand = mass * integrand
    return np.sum(integrand)
|
def function[integrate, parameter[self, mass_min, mass_max, log_mode, weight, steps]]:
constant[ Numerical Riemannn integral of the IMF (stupid simple).
Parameters:
-----------
mass_min: minimum mass bound for integration (solar masses)
mass_max: maximum mass bound for integration (solar masses)
log_mode[True]: use logarithmic steps in stellar mass as oppose to linear
weight[False]: weight the integral by stellar mass
steps: number of numerical integration steps
Returns:
--------
result of integral
]
if name[log_mode] begin[:]
variable[d_log_mass] assign[=] binary_operation[binary_operation[call[name[np].log10, parameter[name[mass_max]]] - call[name[np].log10, parameter[name[mass_min]]]] / call[name[float], parameter[name[steps]]]]
variable[log_mass] assign[=] call[name[np].linspace, parameter[call[name[np].log10, parameter[name[mass_min]]], call[name[np].log10, parameter[name[mass_max]]], name[steps]]]
variable[mass] assign[=] binary_operation[constant[10.0] ** name[log_mass]]
if name[weight] begin[:]
return[call[name[np].sum, parameter[binary_operation[binary_operation[name[mass] * name[d_log_mass]] * call[name[self].pdf, parameter[name[mass]]]]]]]
|
keyword[def] identifier[integrate] ( identifier[self] , identifier[mass_min] , identifier[mass_max] , identifier[log_mode] = keyword[True] , identifier[weight] = keyword[False] , identifier[steps] = literal[int] ):
literal[string]
keyword[if] identifier[log_mode] :
identifier[d_log_mass] =( identifier[np] . identifier[log10] ( identifier[mass_max] )- identifier[np] . identifier[log10] ( identifier[mass_min] ))/ identifier[float] ( identifier[steps] )
identifier[log_mass] = identifier[np] . identifier[linspace] ( identifier[np] . identifier[log10] ( identifier[mass_min] ), identifier[np] . identifier[log10] ( identifier[mass_max] ), identifier[steps] )
identifier[mass] = literal[int] ** identifier[log_mass]
keyword[if] identifier[weight] :
keyword[return] identifier[np] . identifier[sum] ( identifier[mass] * identifier[d_log_mass] * identifier[self] . identifier[pdf] ( identifier[mass] , identifier[log_mode] = keyword[True] ))
keyword[else] :
keyword[return] identifier[np] . identifier[sum] ( identifier[d_log_mass] * identifier[self] . identifier[pdf] ( identifier[mass] , identifier[log_mode] = keyword[True] ))
keyword[else] :
identifier[d_mass] =( identifier[mass_max] - identifier[mass_min] )/ identifier[float] ( identifier[steps] )
identifier[mass] = identifier[np] . identifier[linspace] ( identifier[mass_min] , identifier[mass_max] , identifier[steps] )
keyword[if] identifier[weight] :
keyword[return] identifier[np] . identifier[sum] ( identifier[mass] * identifier[d_mass] * identifier[self] . identifier[pdf] ( identifier[mass] , identifier[log_mode] = keyword[False] ))
keyword[else] :
keyword[return] identifier[np] . identifier[sum] ( identifier[d_mass] * identifier[self] . identifier[pdf] ( identifier[mass] , identifier[log_mode] = keyword[False] ))
|
def integrate(self, mass_min, mass_max, log_mode=True, weight=False, steps=10000.0):
""" Numerical Riemannn integral of the IMF (stupid simple).
Parameters:
-----------
mass_min: minimum mass bound for integration (solar masses)
mass_max: maximum mass bound for integration (solar masses)
log_mode[True]: use logarithmic steps in stellar mass as oppose to linear
weight[False]: weight the integral by stellar mass
steps: number of numerical integration steps
Returns:
--------
result of integral
"""
if log_mode:
d_log_mass = (np.log10(mass_max) - np.log10(mass_min)) / float(steps)
log_mass = np.linspace(np.log10(mass_min), np.log10(mass_max), steps)
mass = 10.0 ** log_mass
if weight:
return np.sum(mass * d_log_mass * self.pdf(mass, log_mode=True)) # depends on [control=['if'], data=[]]
else:
return np.sum(d_log_mass * self.pdf(mass, log_mode=True)) # depends on [control=['if'], data=[]]
else:
d_mass = (mass_max - mass_min) / float(steps)
mass = np.linspace(mass_min, mass_max, steps)
if weight:
return np.sum(mass * d_mass * self.pdf(mass, log_mode=False)) # depends on [control=['if'], data=[]]
else:
return np.sum(d_mass * self.pdf(mass, log_mode=False))
|
def at_depth(self, level):
    """
    Locate the last config item at a specified depth
    """
    # Wrap the raw czmq handle in a non-owning Zconfig (False = don't free).
    handle = lib.zconfig_at_depth(self._as_parameter_, level)
    return Zconfig(handle, False)
|
def function[at_depth, parameter[self, level]]:
constant[
Locate the last config item at a specified depth
]
return[call[name[Zconfig], parameter[call[name[lib].zconfig_at_depth, parameter[name[self]._as_parameter_, name[level]]], constant[False]]]]
|
keyword[def] identifier[at_depth] ( identifier[self] , identifier[level] ):
literal[string]
keyword[return] identifier[Zconfig] ( identifier[lib] . identifier[zconfig_at_depth] ( identifier[self] . identifier[_as_parameter_] , identifier[level] ), keyword[False] )
|
def at_depth(self, level):
    """
    Locate the last config item at a specified depth
    """
    # Delegates to the C binding and wraps the returned pointer in a Zconfig.
    # NOTE(review): the second argument False presumably tells Zconfig not to
    # take ownership of the underlying handle -- confirm against Zconfig.__init__.
    return Zconfig(lib.zconfig_at_depth(self._as_parameter_, level), False)
|
def pre_run_cell(self, cellno, code):
    """Executes before the user-entered code in `ipython` is run. This
    intercepts loops and other problematic code that would produce lots of
    database entries and streamlines it to produce only a single entry.
    Args:
        cellno (int): the cell number that is about to be executed.
        code (str): python source code that is about to be executed.
    """
    import ast

    # Remember the id of the cell that is about to be executed.
    self.cellid = cellno

    # A loop or list/dict comprehension in the cell could generate millions
    # of database entries and make the notebook unusable; only such cells
    # need the streamlined single-entry treatment.
    if not findloop(ast.parse(code)):
        return

    # Disable the acorn logging systems so that we don't pollute the database.
    from acorn.logging.decoration import set_streamlining
    set_streamlining(True)

    # Create the pre-execute entry for the database.
    from time import time
    self.pre = {"m": "loop", "a": None, "s": time(), "r": None, "c": code}
|
def function[pre_run_cell, parameter[self, cellno, code]]:
constant[Executes before the user-entered code in `ipython` is run. This
intercepts loops and other problematic code that would produce lots of
database entries and streamlines it to produce only a single entry.
Args:
cellno (int): the cell number that is about to be executed.
code (str): python source code that is about to be executed.
]
name[self].cellid assign[=] name[cellno]
import module[ast]
if call[name[findloop], parameter[call[name[ast].parse, parameter[name[code]]]]] begin[:]
from relative_module[acorn.logging.decoration] import module[set_streamlining]
call[name[set_streamlining], parameter[constant[True]]]
from relative_module[time] import module[time]
name[self].pre assign[=] dictionary[[<ast.Constant object at 0x7da1b146ca30>, <ast.Constant object at 0x7da1b146ebc0>, <ast.Constant object at 0x7da1b146e9e0>, <ast.Constant object at 0x7da1b146d1b0>, <ast.Constant object at 0x7da1b146c820>], [<ast.Constant object at 0x7da1b146c850>, <ast.Constant object at 0x7da1b146ee60>, <ast.Call object at 0x7da1b146c6a0>, <ast.Constant object at 0x7da1b146db10>, <ast.Name object at 0x7da1b146d420>]]
|
keyword[def] identifier[pre_run_cell] ( identifier[self] , identifier[cellno] , identifier[code] ):
literal[string]
identifier[self] . identifier[cellid] = identifier[cellno]
keyword[import] identifier[ast]
keyword[if] identifier[findloop] ( identifier[ast] . identifier[parse] ( identifier[code] )):
keyword[from] identifier[acorn] . identifier[logging] . identifier[decoration] keyword[import] identifier[set_streamlining]
identifier[set_streamlining] ( keyword[True] )
keyword[from] identifier[time] keyword[import] identifier[time]
identifier[self] . identifier[pre] ={
literal[string] : literal[string] ,
literal[string] : keyword[None] ,
literal[string] : identifier[time] (),
literal[string] : keyword[None] ,
literal[string] : identifier[code] ,
}
|
def pre_run_cell(self, cellno, code):
    """Executes before the user-entered code in `ipython` is run. This
    intercepts loops and other problematic code that would produce lots of
    database entries and streamlines it to produce only a single entry.
    Args:
        cellno (int): the cell number that is about to be executed.
        code (str): python source code that is about to be executed.
    """
    # NOTE(review): the trailing "# depends on [...]" comment below looks like a
    # machine-generated control-flow annotation -- left untouched deliberately.
    #First, we look for loops and list/dict comprehensions in the code. Find
    #the id of the latest cell that was executed.
    self.cellid = cellno
    #If there is a loop somewhere in the code, it could generate millions of
    #database entries and make the notebook unusable.
    import ast
    if findloop(ast.parse(code)):
        #Disable the acorn logging systems so that we don't pollute the
        #database.
        from acorn.logging.decoration import set_streamlining
        set_streamlining(True)
        #Create the pre-execute entry for the database.
        from time import time
        # 'm'=marker, 's'=start timestamp, 'c'=cell source; 'a'/'r' are presumably
        # filled in later with arguments/result -- TODO confirm against post-run hook.
        self.pre = {'m': 'loop', 'a': None, 's': time(), 'r': None, 'c': code} # depends on [control=['if'], data=[]]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.