code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def get_output_fields(self):
""" Get field names from output template.
"""
# Re-engineer list from output format
# XXX TODO: Would be better to use a FieldRecorder class to catch the full field names
emit_fields = list(i.lower() for i in re.sub(r"[^_A-Z]+", ' ', self.format_item(None)).split())
# Validate result
result = []
for name in emit_fields[:]:
if name not in engine.FieldDefinition.FIELDS:
self.LOG.warn("Omitted unknown name '%s' from statistics and output format sorting" % name)
else:
result.append(name)
return result
|
def function[get_output_fields, parameter[self]]:
constant[ Get field names from output template.
]
variable[emit_fields] assign[=] call[name[list], parameter[<ast.GeneratorExp object at 0x7da18ede63b0>]]
variable[result] assign[=] list[[]]
for taget[name[name]] in starred[call[name[emit_fields]][<ast.Slice object at 0x7da18bc71960>]] begin[:]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[engine].FieldDefinition.FIELDS] begin[:]
call[name[self].LOG.warn, parameter[binary_operation[constant[Omitted unknown name '%s' from statistics and output format sorting] <ast.Mod object at 0x7da2590d6920> name[name]]]]
return[name[result]]
|
keyword[def] identifier[get_output_fields] ( identifier[self] ):
literal[string]
identifier[emit_fields] = identifier[list] ( identifier[i] . identifier[lower] () keyword[for] identifier[i] keyword[in] identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[self] . identifier[format_item] ( keyword[None] )). identifier[split] ())
identifier[result] =[]
keyword[for] identifier[name] keyword[in] identifier[emit_fields] [:]:
keyword[if] identifier[name] keyword[not] keyword[in] identifier[engine] . identifier[FieldDefinition] . identifier[FIELDS] :
identifier[self] . identifier[LOG] . identifier[warn] ( literal[string] % identifier[name] )
keyword[else] :
identifier[result] . identifier[append] ( identifier[name] )
keyword[return] identifier[result]
|
def get_output_fields(self):
""" Get field names from output template.
"""
# Re-engineer list from output format
# XXX TODO: Would be better to use a FieldRecorder class to catch the full field names
emit_fields = list((i.lower() for i in re.sub('[^_A-Z]+', ' ', self.format_item(None)).split()))
# Validate result
result = []
for name in emit_fields[:]:
if name not in engine.FieldDefinition.FIELDS:
self.LOG.warn("Omitted unknown name '%s' from statistics and output format sorting" % name) # depends on [control=['if'], data=['name']]
else:
result.append(name) # depends on [control=['for'], data=['name']]
return result
|
def finalize():
"""A function that should be called after parsing all Gin config files.
Calling this function allows registered "finalize hooks" to inspect (and
potentially modify) the Gin config, to provide additional functionality. Hooks
should not modify the configuration object they receive directly; instead,
they should return a dictionary mapping Gin binding keys to (new or updated)
values. This way, all hooks see the config as originally parsed.
Raises:
RuntimeError: If the config is already locked.
ValueError: If two or more hooks attempt to modify or introduce bindings for
the same key. Since it is difficult to control the order in which hooks
are registered, allowing this could yield unpredictable behavior.
"""
if config_is_locked():
raise RuntimeError('Finalize called twice (config already locked).')
bindings = {}
for hook in _FINALIZE_HOOKS:
new_bindings = hook(_CONFIG)
if new_bindings is not None:
for key, value in six.iteritems(new_bindings):
pbk = ParsedBindingKey(key)
if pbk in bindings:
err_str = 'Received conflicting updates when running {}.'
raise ValueError(err_str.format(hook))
bindings[pbk] = value
for pbk, value in six.iteritems(bindings):
bind_parameter(pbk, value)
_set_config_is_locked(True)
|
def function[finalize, parameter[]]:
constant[A function that should be called after parsing all Gin config files.
Calling this function allows registered "finalize hooks" to inspect (and
potentially modify) the Gin config, to provide additional functionality. Hooks
should not modify the configuration object they receive directly; instead,
they should return a dictionary mapping Gin binding keys to (new or updated)
values. This way, all hooks see the config as originally parsed.
Raises:
RuntimeError: If the config is already locked.
ValueError: If two or more hooks attempt to modify or introduce bindings for
the same key. Since it is difficult to control the order in which hooks
are registered, allowing this could yield unpredictable behavior.
]
if call[name[config_is_locked], parameter[]] begin[:]
<ast.Raise object at 0x7da1b02842e0>
variable[bindings] assign[=] dictionary[[], []]
for taget[name[hook]] in starred[name[_FINALIZE_HOOKS]] begin[:]
variable[new_bindings] assign[=] call[name[hook], parameter[name[_CONFIG]]]
if compare[name[new_bindings] is_not constant[None]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0284f40>, <ast.Name object at 0x7da1b0284eb0>]]] in starred[call[name[six].iteritems, parameter[name[new_bindings]]]] begin[:]
variable[pbk] assign[=] call[name[ParsedBindingKey], parameter[name[key]]]
if compare[name[pbk] in name[bindings]] begin[:]
variable[err_str] assign[=] constant[Received conflicting updates when running {}.]
<ast.Raise object at 0x7da1b0285540>
call[name[bindings]][name[pbk]] assign[=] name[value]
for taget[tuple[[<ast.Name object at 0x7da1b02841c0>, <ast.Name object at 0x7da1b0284100>]]] in starred[call[name[six].iteritems, parameter[name[bindings]]]] begin[:]
call[name[bind_parameter], parameter[name[pbk], name[value]]]
call[name[_set_config_is_locked], parameter[constant[True]]]
|
keyword[def] identifier[finalize] ():
literal[string]
keyword[if] identifier[config_is_locked] ():
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[bindings] ={}
keyword[for] identifier[hook] keyword[in] identifier[_FINALIZE_HOOKS] :
identifier[new_bindings] = identifier[hook] ( identifier[_CONFIG] )
keyword[if] identifier[new_bindings] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[new_bindings] ):
identifier[pbk] = identifier[ParsedBindingKey] ( identifier[key] )
keyword[if] identifier[pbk] keyword[in] identifier[bindings] :
identifier[err_str] = literal[string]
keyword[raise] identifier[ValueError] ( identifier[err_str] . identifier[format] ( identifier[hook] ))
identifier[bindings] [ identifier[pbk] ]= identifier[value]
keyword[for] identifier[pbk] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[bindings] ):
identifier[bind_parameter] ( identifier[pbk] , identifier[value] )
identifier[_set_config_is_locked] ( keyword[True] )
|
def finalize():
"""A function that should be called after parsing all Gin config files.
Calling this function allows registered "finalize hooks" to inspect (and
potentially modify) the Gin config, to provide additional functionality. Hooks
should not modify the configuration object they receive directly; instead,
they should return a dictionary mapping Gin binding keys to (new or updated)
values. This way, all hooks see the config as originally parsed.
Raises:
RuntimeError: If the config is already locked.
ValueError: If two or more hooks attempt to modify or introduce bindings for
the same key. Since it is difficult to control the order in which hooks
are registered, allowing this could yield unpredictable behavior.
"""
if config_is_locked():
raise RuntimeError('Finalize called twice (config already locked).') # depends on [control=['if'], data=[]]
bindings = {}
for hook in _FINALIZE_HOOKS:
new_bindings = hook(_CONFIG)
if new_bindings is not None:
for (key, value) in six.iteritems(new_bindings):
pbk = ParsedBindingKey(key)
if pbk in bindings:
err_str = 'Received conflicting updates when running {}.'
raise ValueError(err_str.format(hook)) # depends on [control=['if'], data=[]]
bindings[pbk] = value # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['new_bindings']] # depends on [control=['for'], data=['hook']]
for (pbk, value) in six.iteritems(bindings):
bind_parameter(pbk, value) # depends on [control=['for'], data=[]]
_set_config_is_locked(True)
|
def OnReorder(self, event):
"""Given a request to reorder, tell us to reorder"""
column = self.columns[event.GetColumn()]
return self.ReorderByColumn( column )
|
def function[OnReorder, parameter[self, event]]:
constant[Given a request to reorder, tell us to reorder]
variable[column] assign[=] call[name[self].columns][call[name[event].GetColumn, parameter[]]]
return[call[name[self].ReorderByColumn, parameter[name[column]]]]
|
keyword[def] identifier[OnReorder] ( identifier[self] , identifier[event] ):
literal[string]
identifier[column] = identifier[self] . identifier[columns] [ identifier[event] . identifier[GetColumn] ()]
keyword[return] identifier[self] . identifier[ReorderByColumn] ( identifier[column] )
|
def OnReorder(self, event):
"""Given a request to reorder, tell us to reorder"""
column = self.columns[event.GetColumn()]
return self.ReorderByColumn(column)
|
def feature_analysis(fname="feature_analysis.png"):
"""
Create figures for feature analysis
"""
# Create side-by-side axes grid
_, axes = plt.subplots(ncols=2, figsize=(18,6))
# Draw RadViz on the left
data = load_occupancy(split=False)
oz = RadViz(ax=axes[0], classes=["unoccupied", "occupied"])
oz.fit(data.X, data.y)
oz.finalize()
# Draw Rank2D on the right
data = load_concrete(split=False)
oz = Rank2D(ax=axes[1])
oz.fit_transform(data.X, data.y)
oz.finalize()
# Save figure
path = os.path.join(FIGURES, fname)
plt.tight_layout()
plt.savefig(path)
|
def function[feature_analysis, parameter[fname]]:
constant[
Create figures for feature analysis
]
<ast.Tuple object at 0x7da20c6e5cc0> assign[=] call[name[plt].subplots, parameter[]]
variable[data] assign[=] call[name[load_occupancy], parameter[]]
variable[oz] assign[=] call[name[RadViz], parameter[]]
call[name[oz].fit, parameter[name[data].X, name[data].y]]
call[name[oz].finalize, parameter[]]
variable[data] assign[=] call[name[load_concrete], parameter[]]
variable[oz] assign[=] call[name[Rank2D], parameter[]]
call[name[oz].fit_transform, parameter[name[data].X, name[data].y]]
call[name[oz].finalize, parameter[]]
variable[path] assign[=] call[name[os].path.join, parameter[name[FIGURES], name[fname]]]
call[name[plt].tight_layout, parameter[]]
call[name[plt].savefig, parameter[name[path]]]
|
keyword[def] identifier[feature_analysis] ( identifier[fname] = literal[string] ):
literal[string]
identifier[_] , identifier[axes] = identifier[plt] . identifier[subplots] ( identifier[ncols] = literal[int] , identifier[figsize] =( literal[int] , literal[int] ))
identifier[data] = identifier[load_occupancy] ( identifier[split] = keyword[False] )
identifier[oz] = identifier[RadViz] ( identifier[ax] = identifier[axes] [ literal[int] ], identifier[classes] =[ literal[string] , literal[string] ])
identifier[oz] . identifier[fit] ( identifier[data] . identifier[X] , identifier[data] . identifier[y] )
identifier[oz] . identifier[finalize] ()
identifier[data] = identifier[load_concrete] ( identifier[split] = keyword[False] )
identifier[oz] = identifier[Rank2D] ( identifier[ax] = identifier[axes] [ literal[int] ])
identifier[oz] . identifier[fit_transform] ( identifier[data] . identifier[X] , identifier[data] . identifier[y] )
identifier[oz] . identifier[finalize] ()
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[FIGURES] , identifier[fname] )
identifier[plt] . identifier[tight_layout] ()
identifier[plt] . identifier[savefig] ( identifier[path] )
|
def feature_analysis(fname='feature_analysis.png'):
"""
Create figures for feature analysis
"""
# Create side-by-side axes grid
(_, axes) = plt.subplots(ncols=2, figsize=(18, 6))
# Draw RadViz on the left
data = load_occupancy(split=False)
oz = RadViz(ax=axes[0], classes=['unoccupied', 'occupied'])
oz.fit(data.X, data.y)
oz.finalize()
# Draw Rank2D on the right
data = load_concrete(split=False)
oz = Rank2D(ax=axes[1])
oz.fit_transform(data.X, data.y)
oz.finalize()
# Save figure
path = os.path.join(FIGURES, fname)
plt.tight_layout()
plt.savefig(path)
|
def _build_headers(self, method, auth_session):
"""Create headers for the request.
Parameters
method (str)
HTTP method (e.g. 'POST').
auth_session (Session)
The Session object containing OAuth 2.0 credentials.
Returns
headers (dict)
Dictionary of access headers to attach to request.
Raises
LyftIllegalState (ApiError)
Raised if headers are invalid.
"""
token_type = auth_session.token_type
token = auth_session.oauth2credential.access_token
if not self._authorization_headers_valid(token_type, token):
message = 'Invalid token_type or token.'
raise LyftIllegalState(message)
headers = {
'Authorization': ' '.join([token_type, token]),
}
if method in http.BODY_METHODS:
headers.update(http.DEFAULT_CONTENT_HEADERS)
return headers
|
def function[_build_headers, parameter[self, method, auth_session]]:
constant[Create headers for the request.
Parameters
method (str)
HTTP method (e.g. 'POST').
auth_session (Session)
The Session object containing OAuth 2.0 credentials.
Returns
headers (dict)
Dictionary of access headers to attach to request.
Raises
LyftIllegalState (ApiError)
Raised if headers are invalid.
]
variable[token_type] assign[=] name[auth_session].token_type
variable[token] assign[=] name[auth_session].oauth2credential.access_token
if <ast.UnaryOp object at 0x7da2041daad0> begin[:]
variable[message] assign[=] constant[Invalid token_type or token.]
<ast.Raise object at 0x7da2041d8130>
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da2041daa10>], [<ast.Call object at 0x7da1b0a6cd90>]]
if compare[name[method] in name[http].BODY_METHODS] begin[:]
call[name[headers].update, parameter[name[http].DEFAULT_CONTENT_HEADERS]]
return[name[headers]]
|
keyword[def] identifier[_build_headers] ( identifier[self] , identifier[method] , identifier[auth_session] ):
literal[string]
identifier[token_type] = identifier[auth_session] . identifier[token_type]
identifier[token] = identifier[auth_session] . identifier[oauth2credential] . identifier[access_token]
keyword[if] keyword[not] identifier[self] . identifier[_authorization_headers_valid] ( identifier[token_type] , identifier[token] ):
identifier[message] = literal[string]
keyword[raise] identifier[LyftIllegalState] ( identifier[message] )
identifier[headers] ={
literal[string] : literal[string] . identifier[join] ([ identifier[token_type] , identifier[token] ]),
}
keyword[if] identifier[method] keyword[in] identifier[http] . identifier[BODY_METHODS] :
identifier[headers] . identifier[update] ( identifier[http] . identifier[DEFAULT_CONTENT_HEADERS] )
keyword[return] identifier[headers]
|
def _build_headers(self, method, auth_session):
"""Create headers for the request.
Parameters
method (str)
HTTP method (e.g. 'POST').
auth_session (Session)
The Session object containing OAuth 2.0 credentials.
Returns
headers (dict)
Dictionary of access headers to attach to request.
Raises
LyftIllegalState (ApiError)
Raised if headers are invalid.
"""
token_type = auth_session.token_type
token = auth_session.oauth2credential.access_token
if not self._authorization_headers_valid(token_type, token):
message = 'Invalid token_type or token.'
raise LyftIllegalState(message) # depends on [control=['if'], data=[]]
headers = {'Authorization': ' '.join([token_type, token])}
if method in http.BODY_METHODS:
headers.update(http.DEFAULT_CONTENT_HEADERS) # depends on [control=['if'], data=[]]
return headers
|
def _get_progress(self):
"""
Get current progress of emerge.
Returns a dict containing current and total value.
"""
input_data = []
ret = {}
# traverse emerge.log from bottom up to get latest information
last_lines = self.py3.command_output(["tail", "-50", self.emerge_log_file])
input_data = last_lines.split("\n")
input_data.reverse()
for line in input_data:
if "*** terminating." in line:
# copy content of ret_default, not only the references
ret = copy.deepcopy(self.ret_default)
break
else:
status_re = re.compile(
"\((?P<cu>[\d]+) of (?P<t>[\d]+)\) "
"(?P<a>[a-zA-Z\/]+( [a-zA-Z]+)?) "
"\((?P<ca>[\w\-]+)\/(?P<p>[\w\.]+)"
)
res = status_re.search(line)
if res is not None:
ret["action"] = res.group("a").lower()
ret["category"] = res.group("ca")
ret["current"] = res.group("cu")
ret["pkg"] = res.group("p")
ret["total"] = res.group("t")
break
return ret
|
def function[_get_progress, parameter[self]]:
constant[
Get current progress of emerge.
Returns a dict containing current and total value.
]
variable[input_data] assign[=] list[[]]
variable[ret] assign[=] dictionary[[], []]
variable[last_lines] assign[=] call[name[self].py3.command_output, parameter[list[[<ast.Constant object at 0x7da18bc71ff0>, <ast.Constant object at 0x7da18bc72410>, <ast.Attribute object at 0x7da18bc71c00>]]]]
variable[input_data] assign[=] call[name[last_lines].split, parameter[constant[
]]]
call[name[input_data].reverse, parameter[]]
for taget[name[line]] in starred[name[input_data]] begin[:]
if compare[constant[*** terminating.] in name[line]] begin[:]
variable[ret] assign[=] call[name[copy].deepcopy, parameter[name[self].ret_default]]
break
return[name[ret]]
|
keyword[def] identifier[_get_progress] ( identifier[self] ):
literal[string]
identifier[input_data] =[]
identifier[ret] ={}
identifier[last_lines] = identifier[self] . identifier[py3] . identifier[command_output] ([ literal[string] , literal[string] , identifier[self] . identifier[emerge_log_file] ])
identifier[input_data] = identifier[last_lines] . identifier[split] ( literal[string] )
identifier[input_data] . identifier[reverse] ()
keyword[for] identifier[line] keyword[in] identifier[input_data] :
keyword[if] literal[string] keyword[in] identifier[line] :
identifier[ret] = identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[ret_default] )
keyword[break]
keyword[else] :
identifier[status_re] = identifier[re] . identifier[compile] (
literal[string]
literal[string]
literal[string]
)
identifier[res] = identifier[status_re] . identifier[search] ( identifier[line] )
keyword[if] identifier[res] keyword[is] keyword[not] keyword[None] :
identifier[ret] [ literal[string] ]= identifier[res] . identifier[group] ( literal[string] ). identifier[lower] ()
identifier[ret] [ literal[string] ]= identifier[res] . identifier[group] ( literal[string] )
identifier[ret] [ literal[string] ]= identifier[res] . identifier[group] ( literal[string] )
identifier[ret] [ literal[string] ]= identifier[res] . identifier[group] ( literal[string] )
identifier[ret] [ literal[string] ]= identifier[res] . identifier[group] ( literal[string] )
keyword[break]
keyword[return] identifier[ret]
|
def _get_progress(self):
"""
Get current progress of emerge.
Returns a dict containing current and total value.
"""
input_data = []
ret = {}
# traverse emerge.log from bottom up to get latest information
last_lines = self.py3.command_output(['tail', '-50', self.emerge_log_file])
input_data = last_lines.split('\n')
input_data.reverse()
for line in input_data:
if '*** terminating.' in line:
# copy content of ret_default, not only the references
ret = copy.deepcopy(self.ret_default)
break # depends on [control=['if'], data=[]]
else:
status_re = re.compile('\\((?P<cu>[\\d]+) of (?P<t>[\\d]+)\\) (?P<a>[a-zA-Z\\/]+( [a-zA-Z]+)?) \\((?P<ca>[\\w\\-]+)\\/(?P<p>[\\w\\.]+)')
res = status_re.search(line)
if res is not None:
ret['action'] = res.group('a').lower()
ret['category'] = res.group('ca')
ret['current'] = res.group('cu')
ret['pkg'] = res.group('p')
ret['total'] = res.group('t')
break # depends on [control=['if'], data=['res']] # depends on [control=['for'], data=['line']]
return ret
|
def draw(self, X, y, **kwargs):
"""
Called from the fit method, this method creates the radviz canvas and
draws each instance as a class or target colored point, whose location
is determined by the feature data set.
"""
# Convert from dataframe
if is_dataframe(X):
X = X.values
# Clean out nans and warn that the user they aren't plotted
nan_warnings.warn_if_nans_exist(X)
X, y = nan_warnings.filter_missing(X, y)
# Get the shape of the data
nrows, ncols = X.shape
# Set the axes limits
self.ax.set_xlim([-1,1])
self.ax.set_ylim([-1,1])
# Create the colors
# TODO: Allow both colormap, listed colors, and palette definition
# TODO: Make this an independent function or property for override!
color_values = resolve_colors(
n_colors=len(self.classes_), colormap=self.colormap, colors=self.color
)
self._colors = dict(zip(self.classes_, color_values))
# Create a data structure to hold scatter plot representations
to_plot = {}
for kls in self.classes_:
to_plot[kls] = [[], []]
# Compute the arcs around the circumference for each feature axis
# TODO: make this an independent function for override
s = np.array([
(np.cos(t), np.sin(t))
for t in [
2.0 * np.pi * (i / float(ncols))
for i in range(ncols)
]
])
# Compute the locations of the scatter plot for each class
# Normalize the data first to plot along the 0, 1 axis
for i, row in enumerate(self.normalize(X)):
row_ = np.repeat(np.expand_dims(row, axis=1), 2, axis=1)
xy = (s * row_).sum(axis=0) / row.sum()
kls = self.classes_[y[i]]
to_plot[kls][0].append(xy[0])
to_plot[kls][1].append(xy[1])
# Add the scatter plots from the to_plot function
# TODO: store these plots to add more instances to later
# TODO: make this a separate function
for i, kls in enumerate(self.classes_):
self.ax.scatter(
to_plot[kls][0], to_plot[kls][1], color=self._colors[kls],
label=str(kls), alpha=self.alpha, **kwargs
)
# Add the circular axis path
# TODO: Make this a seperate function (along with labeling)
self.ax.add_patch(patches.Circle(
(0.0, 0.0), radius=1.0, facecolor='none', edgecolor='grey', linewidth=.5
))
# Add the feature names
for xy, name in zip(s, self.features_):
# Add the patch indicating the location of the axis
self.ax.add_patch(patches.Circle(xy, radius=0.025, facecolor='#777777'))
# Add the feature names offset around the axis marker
if xy[0] < 0.0 and xy[1] < 0.0:
self.ax.text(xy[0] - 0.025, xy[1] - 0.025, name, ha='right', va='top', size='small')
elif xy[0] < 0.0 and xy[1] >= 0.0:
self.ax.text(xy[0] - 0.025, xy[1] + 0.025, name, ha='right', va='bottom', size='small')
elif xy[0] >= 0.0 and xy[1] < 0.0:
self.ax.text(xy[0] + 0.025, xy[1] - 0.025, name, ha='left', va='top', size='small')
elif xy[0] >= 0.0 and xy[1] >= 0.0:
self.ax.text(xy[0] + 0.025, xy[1] + 0.025, name, ha='left', va='bottom', size='small')
self.ax.axis('equal')
|
def function[draw, parameter[self, X, y]]:
constant[
Called from the fit method, this method creates the radviz canvas and
draws each instance as a class or target colored point, whose location
is determined by the feature data set.
]
if call[name[is_dataframe], parameter[name[X]]] begin[:]
variable[X] assign[=] name[X].values
call[name[nan_warnings].warn_if_nans_exist, parameter[name[X]]]
<ast.Tuple object at 0x7da18bcc9f30> assign[=] call[name[nan_warnings].filter_missing, parameter[name[X], name[y]]]
<ast.Tuple object at 0x7da18bcca440> assign[=] name[X].shape
call[name[self].ax.set_xlim, parameter[list[[<ast.UnaryOp object at 0x7da18bccba30>, <ast.Constant object at 0x7da18bccb370>]]]]
call[name[self].ax.set_ylim, parameter[list[[<ast.UnaryOp object at 0x7da18bccb220>, <ast.Constant object at 0x7da18bcc8dc0>]]]]
variable[color_values] assign[=] call[name[resolve_colors], parameter[]]
name[self]._colors assign[=] call[name[dict], parameter[call[name[zip], parameter[name[self].classes_, name[color_values]]]]]
variable[to_plot] assign[=] dictionary[[], []]
for taget[name[kls]] in starred[name[self].classes_] begin[:]
call[name[to_plot]][name[kls]] assign[=] list[[<ast.List object at 0x7da18bcc9210>, <ast.List object at 0x7da18bcc93c0>]]
variable[s] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da18bcca8c0>]]
for taget[tuple[[<ast.Name object at 0x7da18bcc8f40>, <ast.Name object at 0x7da18bccb4c0>]]] in starred[call[name[enumerate], parameter[call[name[self].normalize, parameter[name[X]]]]]] begin[:]
variable[row_] assign[=] call[name[np].repeat, parameter[call[name[np].expand_dims, parameter[name[row]]], constant[2]]]
variable[xy] assign[=] binary_operation[call[binary_operation[name[s] * name[row_]].sum, parameter[]] / call[name[row].sum, parameter[]]]
variable[kls] assign[=] call[name[self].classes_][call[name[y]][name[i]]]
call[call[call[name[to_plot]][name[kls]]][constant[0]].append, parameter[call[name[xy]][constant[0]]]]
call[call[call[name[to_plot]][name[kls]]][constant[1]].append, parameter[call[name[xy]][constant[1]]]]
for taget[tuple[[<ast.Name object at 0x7da20c7caa70>, <ast.Name object at 0x7da20c7cb220>]]] in starred[call[name[enumerate], parameter[name[self].classes_]]] begin[:]
call[name[self].ax.scatter, parameter[call[call[name[to_plot]][name[kls]]][constant[0]], call[call[name[to_plot]][name[kls]]][constant[1]]]]
call[name[self].ax.add_patch, parameter[call[name[patches].Circle, parameter[tuple[[<ast.Constant object at 0x7da20c7c8a30>, <ast.Constant object at 0x7da20c7c8070>]]]]]]
for taget[tuple[[<ast.Name object at 0x7da20c7cbc70>, <ast.Name object at 0x7da20c7c8550>]]] in starred[call[name[zip], parameter[name[s], name[self].features_]]] begin[:]
call[name[self].ax.add_patch, parameter[call[name[patches].Circle, parameter[name[xy]]]]]
if <ast.BoolOp object at 0x7da20c7c8d60> begin[:]
call[name[self].ax.text, parameter[binary_operation[call[name[xy]][constant[0]] - constant[0.025]], binary_operation[call[name[xy]][constant[1]] - constant[0.025]], name[name]]]
call[name[self].ax.axis, parameter[constant[equal]]]
|
keyword[def] identifier[draw] ( identifier[self] , identifier[X] , identifier[y] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[is_dataframe] ( identifier[X] ):
identifier[X] = identifier[X] . identifier[values]
identifier[nan_warnings] . identifier[warn_if_nans_exist] ( identifier[X] )
identifier[X] , identifier[y] = identifier[nan_warnings] . identifier[filter_missing] ( identifier[X] , identifier[y] )
identifier[nrows] , identifier[ncols] = identifier[X] . identifier[shape]
identifier[self] . identifier[ax] . identifier[set_xlim] ([- literal[int] , literal[int] ])
identifier[self] . identifier[ax] . identifier[set_ylim] ([- literal[int] , literal[int] ])
identifier[color_values] = identifier[resolve_colors] (
identifier[n_colors] = identifier[len] ( identifier[self] . identifier[classes_] ), identifier[colormap] = identifier[self] . identifier[colormap] , identifier[colors] = identifier[self] . identifier[color]
)
identifier[self] . identifier[_colors] = identifier[dict] ( identifier[zip] ( identifier[self] . identifier[classes_] , identifier[color_values] ))
identifier[to_plot] ={}
keyword[for] identifier[kls] keyword[in] identifier[self] . identifier[classes_] :
identifier[to_plot] [ identifier[kls] ]=[[],[]]
identifier[s] = identifier[np] . identifier[array] ([
( identifier[np] . identifier[cos] ( identifier[t] ), identifier[np] . identifier[sin] ( identifier[t] ))
keyword[for] identifier[t] keyword[in] [
literal[int] * identifier[np] . identifier[pi] *( identifier[i] / identifier[float] ( identifier[ncols] ))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[ncols] )
]
])
keyword[for] identifier[i] , identifier[row] keyword[in] identifier[enumerate] ( identifier[self] . identifier[normalize] ( identifier[X] )):
identifier[row_] = identifier[np] . identifier[repeat] ( identifier[np] . identifier[expand_dims] ( identifier[row] , identifier[axis] = literal[int] ), literal[int] , identifier[axis] = literal[int] )
identifier[xy] =( identifier[s] * identifier[row_] ). identifier[sum] ( identifier[axis] = literal[int] )/ identifier[row] . identifier[sum] ()
identifier[kls] = identifier[self] . identifier[classes_] [ identifier[y] [ identifier[i] ]]
identifier[to_plot] [ identifier[kls] ][ literal[int] ]. identifier[append] ( identifier[xy] [ literal[int] ])
identifier[to_plot] [ identifier[kls] ][ literal[int] ]. identifier[append] ( identifier[xy] [ literal[int] ])
keyword[for] identifier[i] , identifier[kls] keyword[in] identifier[enumerate] ( identifier[self] . identifier[classes_] ):
identifier[self] . identifier[ax] . identifier[scatter] (
identifier[to_plot] [ identifier[kls] ][ literal[int] ], identifier[to_plot] [ identifier[kls] ][ literal[int] ], identifier[color] = identifier[self] . identifier[_colors] [ identifier[kls] ],
identifier[label] = identifier[str] ( identifier[kls] ), identifier[alpha] = identifier[self] . identifier[alpha] ,** identifier[kwargs]
)
identifier[self] . identifier[ax] . identifier[add_patch] ( identifier[patches] . identifier[Circle] (
( literal[int] , literal[int] ), identifier[radius] = literal[int] , identifier[facecolor] = literal[string] , identifier[edgecolor] = literal[string] , identifier[linewidth] = literal[int]
))
keyword[for] identifier[xy] , identifier[name] keyword[in] identifier[zip] ( identifier[s] , identifier[self] . identifier[features_] ):
identifier[self] . identifier[ax] . identifier[add_patch] ( identifier[patches] . identifier[Circle] ( identifier[xy] , identifier[radius] = literal[int] , identifier[facecolor] = literal[string] ))
keyword[if] identifier[xy] [ literal[int] ]< literal[int] keyword[and] identifier[xy] [ literal[int] ]< literal[int] :
identifier[self] . identifier[ax] . identifier[text] ( identifier[xy] [ literal[int] ]- literal[int] , identifier[xy] [ literal[int] ]- literal[int] , identifier[name] , identifier[ha] = literal[string] , identifier[va] = literal[string] , identifier[size] = literal[string] )
keyword[elif] identifier[xy] [ literal[int] ]< literal[int] keyword[and] identifier[xy] [ literal[int] ]>= literal[int] :
identifier[self] . identifier[ax] . identifier[text] ( identifier[xy] [ literal[int] ]- literal[int] , identifier[xy] [ literal[int] ]+ literal[int] , identifier[name] , identifier[ha] = literal[string] , identifier[va] = literal[string] , identifier[size] = literal[string] )
keyword[elif] identifier[xy] [ literal[int] ]>= literal[int] keyword[and] identifier[xy] [ literal[int] ]< literal[int] :
identifier[self] . identifier[ax] . identifier[text] ( identifier[xy] [ literal[int] ]+ literal[int] , identifier[xy] [ literal[int] ]- literal[int] , identifier[name] , identifier[ha] = literal[string] , identifier[va] = literal[string] , identifier[size] = literal[string] )
keyword[elif] identifier[xy] [ literal[int] ]>= literal[int] keyword[and] identifier[xy] [ literal[int] ]>= literal[int] :
identifier[self] . identifier[ax] . identifier[text] ( identifier[xy] [ literal[int] ]+ literal[int] , identifier[xy] [ literal[int] ]+ literal[int] , identifier[name] , identifier[ha] = literal[string] , identifier[va] = literal[string] , identifier[size] = literal[string] )
identifier[self] . identifier[ax] . identifier[axis] ( literal[string] )
|
def draw(self, X, y, **kwargs):
"""
Called from the fit method, this method creates the radviz canvas and
draws each instance as a class or target colored point, whose location
is determined by the feature data set.
"""
# Convert from dataframe
if is_dataframe(X):
X = X.values # depends on [control=['if'], data=[]]
# Clean out nans and warn that the user they aren't plotted
nan_warnings.warn_if_nans_exist(X)
(X, y) = nan_warnings.filter_missing(X, y)
# Get the shape of the data
(nrows, ncols) = X.shape
# Set the axes limits
self.ax.set_xlim([-1, 1])
self.ax.set_ylim([-1, 1])
# Create the colors
# TODO: Allow both colormap, listed colors, and palette definition
# TODO: Make this an independent function or property for override!
color_values = resolve_colors(n_colors=len(self.classes_), colormap=self.colormap, colors=self.color)
self._colors = dict(zip(self.classes_, color_values))
# Create a data structure to hold scatter plot representations
to_plot = {}
for kls in self.classes_:
to_plot[kls] = [[], []] # depends on [control=['for'], data=['kls']]
# Compute the arcs around the circumference for each feature axis
# TODO: make this an independent function for override
s = np.array([(np.cos(t), np.sin(t)) for t in [2.0 * np.pi * (i / float(ncols)) for i in range(ncols)]])
# Compute the locations of the scatter plot for each class
# Normalize the data first to plot along the 0, 1 axis
for (i, row) in enumerate(self.normalize(X)):
row_ = np.repeat(np.expand_dims(row, axis=1), 2, axis=1)
xy = (s * row_).sum(axis=0) / row.sum()
kls = self.classes_[y[i]]
to_plot[kls][0].append(xy[0])
to_plot[kls][1].append(xy[1]) # depends on [control=['for'], data=[]]
# Add the scatter plots from the to_plot function
# TODO: store these plots to add more instances to later
# TODO: make this a separate function
for (i, kls) in enumerate(self.classes_):
self.ax.scatter(to_plot[kls][0], to_plot[kls][1], color=self._colors[kls], label=str(kls), alpha=self.alpha, **kwargs) # depends on [control=['for'], data=[]]
# Add the circular axis path
# TODO: Make this a seperate function (along with labeling)
self.ax.add_patch(patches.Circle((0.0, 0.0), radius=1.0, facecolor='none', edgecolor='grey', linewidth=0.5))
# Add the feature names
for (xy, name) in zip(s, self.features_):
# Add the patch indicating the location of the axis
self.ax.add_patch(patches.Circle(xy, radius=0.025, facecolor='#777777'))
# Add the feature names offset around the axis marker
if xy[0] < 0.0 and xy[1] < 0.0:
self.ax.text(xy[0] - 0.025, xy[1] - 0.025, name, ha='right', va='top', size='small') # depends on [control=['if'], data=[]]
elif xy[0] < 0.0 and xy[1] >= 0.0:
self.ax.text(xy[0] - 0.025, xy[1] + 0.025, name, ha='right', va='bottom', size='small') # depends on [control=['if'], data=[]]
elif xy[0] >= 0.0 and xy[1] < 0.0:
self.ax.text(xy[0] + 0.025, xy[1] - 0.025, name, ha='left', va='top', size='small') # depends on [control=['if'], data=[]]
elif xy[0] >= 0.0 and xy[1] >= 0.0:
self.ax.text(xy[0] + 0.025, xy[1] + 0.025, name, ha='left', va='bottom', size='small') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
self.ax.axis('equal')
|
def add_global(self, globalvalue):
"""
Add a new global value.
"""
assert globalvalue.name not in self.globals
self.globals[globalvalue.name] = globalvalue
|
def function[add_global, parameter[self, globalvalue]]:
constant[
Add a new global value.
]
assert[compare[name[globalvalue].name <ast.NotIn object at 0x7da2590d7190> name[self].globals]]
call[name[self].globals][name[globalvalue].name] assign[=] name[globalvalue]
|
keyword[def] identifier[add_global] ( identifier[self] , identifier[globalvalue] ):
literal[string]
keyword[assert] identifier[globalvalue] . identifier[name] keyword[not] keyword[in] identifier[self] . identifier[globals]
identifier[self] . identifier[globals] [ identifier[globalvalue] . identifier[name] ]= identifier[globalvalue]
|
def add_global(self, globalvalue):
"""
Add a new global value.
"""
assert globalvalue.name not in self.globals
self.globals[globalvalue.name] = globalvalue
|
def set_errors(self, errors):
"""Set parameter error estimate """
if errors is None:
self.__errors__ = None
return
self.__errors__ = [asscalar(e) for e in errors]
|
def function[set_errors, parameter[self, errors]]:
constant[Set parameter error estimate ]
if compare[name[errors] is constant[None]] begin[:]
name[self].__errors__ assign[=] constant[None]
return[None]
name[self].__errors__ assign[=] <ast.ListComp object at 0x7da1b0147010>
|
keyword[def] identifier[set_errors] ( identifier[self] , identifier[errors] ):
literal[string]
keyword[if] identifier[errors] keyword[is] keyword[None] :
identifier[self] . identifier[__errors__] = keyword[None]
keyword[return]
identifier[self] . identifier[__errors__] =[ identifier[asscalar] ( identifier[e] ) keyword[for] identifier[e] keyword[in] identifier[errors] ]
|
def set_errors(self, errors):
"""Set parameter error estimate """
if errors is None:
self.__errors__ = None
return # depends on [control=['if'], data=[]]
self.__errors__ = [asscalar(e) for e in errors]
|
def assemble_flash_code(self, asm):
"""
assemble the given code and program the Flash
"""
stream = StringIO(asm)
worker = assembler.Assembler(self.processor, stream)
try:
result = worker.assemble()
except BaseException as e:
return e, None
self.flash.program(result)
return None, result
|
def function[assemble_flash_code, parameter[self, asm]]:
constant[
assemble the given code and program the Flash
]
variable[stream] assign[=] call[name[StringIO], parameter[name[asm]]]
variable[worker] assign[=] call[name[assembler].Assembler, parameter[name[self].processor, name[stream]]]
<ast.Try object at 0x7da20e956470>
call[name[self].flash.program, parameter[name[result]]]
return[tuple[[<ast.Constant object at 0x7da20e957e50>, <ast.Name object at 0x7da20e954070>]]]
|
keyword[def] identifier[assemble_flash_code] ( identifier[self] , identifier[asm] ):
literal[string]
identifier[stream] = identifier[StringIO] ( identifier[asm] )
identifier[worker] = identifier[assembler] . identifier[Assembler] ( identifier[self] . identifier[processor] , identifier[stream] )
keyword[try] :
identifier[result] = identifier[worker] . identifier[assemble] ()
keyword[except] identifier[BaseException] keyword[as] identifier[e] :
keyword[return] identifier[e] , keyword[None]
identifier[self] . identifier[flash] . identifier[program] ( identifier[result] )
keyword[return] keyword[None] , identifier[result]
|
def assemble_flash_code(self, asm):
"""
assemble the given code and program the Flash
"""
stream = StringIO(asm)
worker = assembler.Assembler(self.processor, stream)
try:
result = worker.assemble() # depends on [control=['try'], data=[]]
except BaseException as e:
return (e, None) # depends on [control=['except'], data=['e']]
self.flash.program(result)
return (None, result)
|
def setup_smtp_factory(**settings):
""" expects a dictionary with 'mail.' keys to create an appropriate smtplib.SMTP instance"""
return CustomSMTP(
host=settings.get('mail.host', 'localhost'),
port=int(settings.get('mail.port', 25)),
user=settings.get('mail.user'),
password=settings.get('mail.password'),
timeout=float(settings.get('mail.timeout', 60)),
)
|
def function[setup_smtp_factory, parameter[]]:
constant[ expects a dictionary with 'mail.' keys to create an appropriate smtplib.SMTP instance]
return[call[name[CustomSMTP], parameter[]]]
|
keyword[def] identifier[setup_smtp_factory] (** identifier[settings] ):
literal[string]
keyword[return] identifier[CustomSMTP] (
identifier[host] = identifier[settings] . identifier[get] ( literal[string] , literal[string] ),
identifier[port] = identifier[int] ( identifier[settings] . identifier[get] ( literal[string] , literal[int] )),
identifier[user] = identifier[settings] . identifier[get] ( literal[string] ),
identifier[password] = identifier[settings] . identifier[get] ( literal[string] ),
identifier[timeout] = identifier[float] ( identifier[settings] . identifier[get] ( literal[string] , literal[int] )),
)
|
def setup_smtp_factory(**settings):
""" expects a dictionary with 'mail.' keys to create an appropriate smtplib.SMTP instance"""
return CustomSMTP(host=settings.get('mail.host', 'localhost'), port=int(settings.get('mail.port', 25)), user=settings.get('mail.user'), password=settings.get('mail.password'), timeout=float(settings.get('mail.timeout', 60)))
|
def get_item(self, address, state = 'fresh'):
"""Get an item from the cache.
:Parameters:
- `address`: its address.
- `state`: the worst state that is acceptable.
:Types:
- `address`: any hashable
- `state`: `str`
:return: the item or `None` if it was not found.
:returntype: `CacheItem`"""
self._lock.acquire()
try:
item = self._items.get(address)
if not item:
return None
self.update_item(item)
if _state_values[state] >= item.state_value:
return item
return None
finally:
self._lock.release()
|
def function[get_item, parameter[self, address, state]]:
constant[Get an item from the cache.
:Parameters:
- `address`: its address.
- `state`: the worst state that is acceptable.
:Types:
- `address`: any hashable
- `state`: `str`
:return: the item or `None` if it was not found.
:returntype: `CacheItem`]
call[name[self]._lock.acquire, parameter[]]
<ast.Try object at 0x7da18ede6680>
|
keyword[def] identifier[get_item] ( identifier[self] , identifier[address] , identifier[state] = literal[string] ):
literal[string]
identifier[self] . identifier[_lock] . identifier[acquire] ()
keyword[try] :
identifier[item] = identifier[self] . identifier[_items] . identifier[get] ( identifier[address] )
keyword[if] keyword[not] identifier[item] :
keyword[return] keyword[None]
identifier[self] . identifier[update_item] ( identifier[item] )
keyword[if] identifier[_state_values] [ identifier[state] ]>= identifier[item] . identifier[state_value] :
keyword[return] identifier[item]
keyword[return] keyword[None]
keyword[finally] :
identifier[self] . identifier[_lock] . identifier[release] ()
|
def get_item(self, address, state='fresh'):
"""Get an item from the cache.
:Parameters:
- `address`: its address.
- `state`: the worst state that is acceptable.
:Types:
- `address`: any hashable
- `state`: `str`
:return: the item or `None` if it was not found.
:returntype: `CacheItem`"""
self._lock.acquire()
try:
item = self._items.get(address)
if not item:
return None # depends on [control=['if'], data=[]]
self.update_item(item)
if _state_values[state] >= item.state_value:
return item # depends on [control=['if'], data=[]]
return None # depends on [control=['try'], data=[]]
finally:
self._lock.release()
|
def fit(self, data):
"""Fit VAR model to data.
Parameters
----------
data : array, shape (trials, channels, samples) or (channels, samples)
Epoched or continuous data set.
Returns
-------
self : :class:`VAR`
The :class:`VAR` object to facilitate method chaining (see usage
example).
"""
data = atleast_3d(data)
if self.delta == 0 or self.delta is None:
# ordinary least squares
x, y = self._construct_eqns(data)
else:
# regularized least squares (ridge regression)
x, y = self._construct_eqns_rls(data)
b, res, rank, s = sp.linalg.lstsq(x, y)
self.coef = b.transpose()
self.residuals = data - self.predict(data)
self.rescov = sp.cov(cat_trials(self.residuals[:, :, self.p:]))
return self
|
def function[fit, parameter[self, data]]:
constant[Fit VAR model to data.
Parameters
----------
data : array, shape (trials, channels, samples) or (channels, samples)
Epoched or continuous data set.
Returns
-------
self : :class:`VAR`
The :class:`VAR` object to facilitate method chaining (see usage
example).
]
variable[data] assign[=] call[name[atleast_3d], parameter[name[data]]]
if <ast.BoolOp object at 0x7da1b2631030> begin[:]
<ast.Tuple object at 0x7da1b2631cf0> assign[=] call[name[self]._construct_eqns, parameter[name[data]]]
<ast.Tuple object at 0x7da1b2630e20> assign[=] call[name[sp].linalg.lstsq, parameter[name[x], name[y]]]
name[self].coef assign[=] call[name[b].transpose, parameter[]]
name[self].residuals assign[=] binary_operation[name[data] - call[name[self].predict, parameter[name[data]]]]
name[self].rescov assign[=] call[name[sp].cov, parameter[call[name[cat_trials], parameter[call[name[self].residuals][tuple[[<ast.Slice object at 0x7da1b2633220>, <ast.Slice object at 0x7da1b2632c80>, <ast.Slice object at 0x7da1b2632c50>]]]]]]]
return[name[self]]
|
keyword[def] identifier[fit] ( identifier[self] , identifier[data] ):
literal[string]
identifier[data] = identifier[atleast_3d] ( identifier[data] )
keyword[if] identifier[self] . identifier[delta] == literal[int] keyword[or] identifier[self] . identifier[delta] keyword[is] keyword[None] :
identifier[x] , identifier[y] = identifier[self] . identifier[_construct_eqns] ( identifier[data] )
keyword[else] :
identifier[x] , identifier[y] = identifier[self] . identifier[_construct_eqns_rls] ( identifier[data] )
identifier[b] , identifier[res] , identifier[rank] , identifier[s] = identifier[sp] . identifier[linalg] . identifier[lstsq] ( identifier[x] , identifier[y] )
identifier[self] . identifier[coef] = identifier[b] . identifier[transpose] ()
identifier[self] . identifier[residuals] = identifier[data] - identifier[self] . identifier[predict] ( identifier[data] )
identifier[self] . identifier[rescov] = identifier[sp] . identifier[cov] ( identifier[cat_trials] ( identifier[self] . identifier[residuals] [:,:, identifier[self] . identifier[p] :]))
keyword[return] identifier[self]
|
def fit(self, data):
"""Fit VAR model to data.
Parameters
----------
data : array, shape (trials, channels, samples) or (channels, samples)
Epoched or continuous data set.
Returns
-------
self : :class:`VAR`
The :class:`VAR` object to facilitate method chaining (see usage
example).
"""
data = atleast_3d(data)
if self.delta == 0 or self.delta is None:
# ordinary least squares
(x, y) = self._construct_eqns(data) # depends on [control=['if'], data=[]]
else:
# regularized least squares (ridge regression)
(x, y) = self._construct_eqns_rls(data)
(b, res, rank, s) = sp.linalg.lstsq(x, y)
self.coef = b.transpose()
self.residuals = data - self.predict(data)
self.rescov = sp.cov(cat_trials(self.residuals[:, :, self.p:]))
return self
|
def parse_time_range(start_dt, end_dt):
"""
Convert the start/end datetimes specified by the user, specifically:
- truncate any minutes/seconds
- for a missing end time, use start + 24 hours
- for a missing start time, use end - 24 hours
- for missing start and end, use the last 24 hours
"""
now = datetime.now()
if start_dt and not end_dt:
end_dt = now
elif end_dt and not start_dt:
start_dt = _EARLIEST_DATE
elif not start_dt and not end_dt: # last 24 hours
end_dt = now
start_dt = end_dt - timedelta(days=1)
return tuple(map(truncate_hour_fraction, (start_dt, end_dt)))
|
def function[parse_time_range, parameter[start_dt, end_dt]]:
constant[
Convert the start/end datetimes specified by the user, specifically:
- truncate any minutes/seconds
- for a missing end time, use start + 24 hours
- for a missing start time, use end - 24 hours
- for missing start and end, use the last 24 hours
]
variable[now] assign[=] call[name[datetime].now, parameter[]]
if <ast.BoolOp object at 0x7da18f810250> begin[:]
variable[end_dt] assign[=] name[now]
return[call[name[tuple], parameter[call[name[map], parameter[name[truncate_hour_fraction], tuple[[<ast.Name object at 0x7da18f812470>, <ast.Name object at 0x7da18f813430>]]]]]]]
|
keyword[def] identifier[parse_time_range] ( identifier[start_dt] , identifier[end_dt] ):
literal[string]
identifier[now] = identifier[datetime] . identifier[now] ()
keyword[if] identifier[start_dt] keyword[and] keyword[not] identifier[end_dt] :
identifier[end_dt] = identifier[now]
keyword[elif] identifier[end_dt] keyword[and] keyword[not] identifier[start_dt] :
identifier[start_dt] = identifier[_EARLIEST_DATE]
keyword[elif] keyword[not] identifier[start_dt] keyword[and] keyword[not] identifier[end_dt] :
identifier[end_dt] = identifier[now]
identifier[start_dt] = identifier[end_dt] - identifier[timedelta] ( identifier[days] = literal[int] )
keyword[return] identifier[tuple] ( identifier[map] ( identifier[truncate_hour_fraction] ,( identifier[start_dt] , identifier[end_dt] )))
|
def parse_time_range(start_dt, end_dt):
"""
Convert the start/end datetimes specified by the user, specifically:
- truncate any minutes/seconds
- for a missing end time, use start + 24 hours
- for a missing start time, use end - 24 hours
- for missing start and end, use the last 24 hours
"""
now = datetime.now()
if start_dt and (not end_dt):
end_dt = now # depends on [control=['if'], data=[]]
elif end_dt and (not start_dt):
start_dt = _EARLIEST_DATE # depends on [control=['if'], data=[]]
elif not start_dt and (not end_dt): # last 24 hours
end_dt = now
start_dt = end_dt - timedelta(days=1) # depends on [control=['if'], data=[]]
return tuple(map(truncate_hour_fraction, (start_dt, end_dt)))
|
def _build_query(self):
"""
Build the base query dictionary
"""
if isinstance(self._query_string, QueryString):
self._query_dsl = self._query_string
elif isinstance(self._query_string, string_types):
self._query_dsl = QueryString(self._query_string)
else:
self._query_dsl = MatchAll()
|
def function[_build_query, parameter[self]]:
constant[
Build the base query dictionary
]
if call[name[isinstance], parameter[name[self]._query_string, name[QueryString]]] begin[:]
name[self]._query_dsl assign[=] name[self]._query_string
|
keyword[def] identifier[_build_query] ( identifier[self] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[self] . identifier[_query_string] , identifier[QueryString] ):
identifier[self] . identifier[_query_dsl] = identifier[self] . identifier[_query_string]
keyword[elif] identifier[isinstance] ( identifier[self] . identifier[_query_string] , identifier[string_types] ):
identifier[self] . identifier[_query_dsl] = identifier[QueryString] ( identifier[self] . identifier[_query_string] )
keyword[else] :
identifier[self] . identifier[_query_dsl] = identifier[MatchAll] ()
|
def _build_query(self):
"""
Build the base query dictionary
"""
if isinstance(self._query_string, QueryString):
self._query_dsl = self._query_string # depends on [control=['if'], data=[]]
elif isinstance(self._query_string, string_types):
self._query_dsl = QueryString(self._query_string) # depends on [control=['if'], data=[]]
else:
self._query_dsl = MatchAll()
|
def scalars_for_mapping_ion_drifts(glats, glons, alts, dates, step_size=None,
max_steps=None, e_field_scaling_only=False):
"""
Calculates scalars for translating ion motions at position
glat, glon, and alt, for date, to the footpoints of the field line
as well as at the magnetic equator.
All inputs are assumed to be 1D arrays.
Note
----
Directions refer to the ion motion direction e.g. the zonal
scalar applies to zonal ion motions (meridional E field assuming ExB ion motion)
Parameters
----------
glats : list-like of floats (degrees)
Geodetic (WGS84) latitude
glons : list-like of floats (degrees)
Geodetic (WGS84) longitude
alts : list-like of floats (km)
Geodetic (WGS84) altitude, height above surface
dates : list-like of datetimes
Date and time for determination of scalars
e_field_scaling_only : boolean (False)
If True, method only calculates the electric field scalar, ignoring
changes in magnitude of B. Note ion velocity related to E/B.
Returns
-------
dict
array-like of scalars for translating ion drifts. Keys are,
'north_zonal_drifts_scalar', 'north_mer_drifts_scalar', and similarly
for southern locations. 'equator_mer_drifts_scalar' and
'equator_zonal_drifts_scalar' cover the mappings to the equator.
"""
if step_size is None:
step_size = 100.
if max_steps is None:
max_steps = 1000
steps = np.arange(max_steps)
# use spacecraft location to get ECEF
ecef_xs, ecef_ys, ecef_zs = geodetic_to_ecef(glats, glons, alts)
# prepare output
eq_zon_drifts_scalar = []
eq_mer_drifts_scalar = []
# magnetic field info
north_mag_scalar = []
south_mag_scalar = []
eq_mag_scalar = []
out = {}
# meridional e-field scalar map, can also be
# zonal ion drift scalar map
# print ('Starting Northern')
north_zon_drifts_scalar, mind_plus, mind_minus = closed_loop_edge_lengths_via_footpoint(glats,
glons, alts, dates, 'north',
'meridional',
step_size=step_size,
max_steps=max_steps,
edge_length=25.,
edge_steps=5)
north_mer_drifts_scalar, mind_plus, mind_minus = closed_loop_edge_lengths_via_footpoint(glats,
glons, alts, dates, 'north',
'zonal',
step_size=step_size,
max_steps=max_steps,
edge_length=25.,
edge_steps=5)
# print ('Starting Southern')
south_zon_drifts_scalar, mind_plus, mind_minus = closed_loop_edge_lengths_via_footpoint(glats,
glons, alts, dates, 'south',
'meridional',
step_size=step_size,
max_steps=max_steps,
edge_length=25.,
edge_steps=5)
south_mer_drifts_scalar, mind_plus, mind_minus = closed_loop_edge_lengths_via_footpoint(glats,
glons, alts, dates, 'south',
'zonal',
step_size=step_size,
max_steps=max_steps,
edge_length=25.,
edge_steps=5)
# print ('Starting Equatorial')
# , step_zon_apex2, mind_plus, mind_minus
eq_zon_drifts_scalar = closed_loop_edge_lengths_via_equator(glats, glons, alts, dates,
'meridional',
edge_length=25.,
edge_steps=5)
# , step_mer_apex2, mind_plus, mind_minus
eq_mer_drifts_scalar = closed_loop_edge_lengths_via_equator(glats, glons, alts, dates,
'zonal',
edge_length=25.,
edge_steps=5)
# print ('Done with core')
north_zon_drifts_scalar = north_zon_drifts_scalar/50.
south_zon_drifts_scalar = south_zon_drifts_scalar/50.
north_mer_drifts_scalar = north_mer_drifts_scalar/50.
south_mer_drifts_scalar = south_mer_drifts_scalar/50.
# equatorial
eq_zon_drifts_scalar = 50./eq_zon_drifts_scalar
eq_mer_drifts_scalar = 50./eq_mer_drifts_scalar
if e_field_scaling_only:
# prepare output
out['north_mer_fields_scalar'] = north_zon_drifts_scalar
out['south_mer_fields_scalar'] = south_zon_drifts_scalar
out['north_zon_fields_scalar'] = north_mer_drifts_scalar
out['south_zon_fields_scalar'] = south_mer_drifts_scalar
out['equator_mer_fields_scalar'] = eq_zon_drifts_scalar
out['equator_zon_fields_scalar'] = eq_mer_drifts_scalar
else:
# figure out scaling for drifts based upon change in magnetic field
# strength
for ecef_x, ecef_y, ecef_z, glat, glon, alt, date in zip(ecef_xs, ecef_ys, ecef_zs,
glats, glons, alts,
dates):
yr, doy = pysat.utils.getyrdoy(date)
double_date = float(yr) + float(doy) / 366.
# get location of apex for s/c field line
apex_x, apex_y, apex_z, apex_lat, apex_lon, apex_alt = apex_location_info(
[glat], [glon],
[alt], [date])
# trace to northern footpoint
sc_root = np.array([ecef_x, ecef_y, ecef_z])
trace_north = field_line_trace(sc_root, double_date, 1., 120.,
steps=steps,
step_size=step_size,
max_steps=max_steps)
# southern tracing
trace_south = field_line_trace(sc_root, double_date, -1., 120.,
steps=steps,
step_size=step_size,
max_steps=max_steps)
# footpoint location
north_ftpnt = trace_north[-1, :]
nft_glat, nft_glon, nft_alt = ecef_to_geodetic(*north_ftpnt)
south_ftpnt = trace_south[-1, :]
sft_glat, sft_glon, sft_alt = ecef_to_geodetic(*south_ftpnt)
# scalar for the northern footpoint electric field based on distances
# for drift also need to include the magnetic field, drift = E/B
tbn, tbe, tbd, b_sc = igrf.igrf12syn(0, double_date, 1, alt,
np.deg2rad(90.-glat),
np.deg2rad(glon))
# get mag field and scalar for northern footpoint
tbn, tbe, tbd, b_nft = igrf.igrf12syn(0, double_date, 1, nft_alt,
np.deg2rad(90.-nft_glat),
np.deg2rad(nft_glon))
north_mag_scalar.append(b_sc/b_nft)
# equatorial values
tbn, tbe, tbd, b_eq = igrf.igrf12syn(0, double_date, 1, apex_alt,
np.deg2rad(90.-apex_lat),
np.deg2rad(apex_lon))
eq_mag_scalar.append(b_sc/b_eq)
# scalar for the southern footpoint
tbn, tbe, tbd, b_sft = igrf.igrf12syn(0, double_date, 1, sft_alt,
np.deg2rad(90.-sft_glat),
np.deg2rad(sft_glon))
south_mag_scalar.append(b_sc/b_sft)
# make E-Field scalars to drifts
# lists to arrays
north_mag_scalar = np.array(north_mag_scalar)
south_mag_scalar = np.array(south_mag_scalar)
eq_mag_scalar = np.array(eq_mag_scalar)
# apply to electric field scaling to get ion drift values
north_zon_drifts_scalar = north_zon_drifts_scalar*north_mag_scalar
south_zon_drifts_scalar = south_zon_drifts_scalar*south_mag_scalar
north_mer_drifts_scalar = north_mer_drifts_scalar*north_mag_scalar
south_mer_drifts_scalar = south_mer_drifts_scalar*south_mag_scalar
# equatorial
eq_zon_drifts_scalar = eq_zon_drifts_scalar*eq_mag_scalar
eq_mer_drifts_scalar = eq_mer_drifts_scalar*eq_mag_scalar
# output
out['north_zonal_drifts_scalar'] = north_zon_drifts_scalar
out['south_zonal_drifts_scalar'] = south_zon_drifts_scalar
out['north_mer_drifts_scalar'] = north_mer_drifts_scalar
out['south_mer_drifts_scalar'] = south_mer_drifts_scalar
out['equator_zonal_drifts_scalar'] = eq_zon_drifts_scalar
out['equator_mer_drifts_scalar'] = eq_mer_drifts_scalar
return out
|
def function[scalars_for_mapping_ion_drifts, parameter[glats, glons, alts, dates, step_size, max_steps, e_field_scaling_only]]:
constant[
Calculates scalars for translating ion motions at position
glat, glon, and alt, for date, to the footpoints of the field line
as well as at the magnetic equator.
All inputs are assumed to be 1D arrays.
Note
----
Directions refer to the ion motion direction e.g. the zonal
scalar applies to zonal ion motions (meridional E field assuming ExB ion motion)
Parameters
----------
glats : list-like of floats (degrees)
Geodetic (WGS84) latitude
glons : list-like of floats (degrees)
Geodetic (WGS84) longitude
alts : list-like of floats (km)
Geodetic (WGS84) altitude, height above surface
dates : list-like of datetimes
Date and time for determination of scalars
e_field_scaling_only : boolean (False)
If True, method only calculates the electric field scalar, ignoring
changes in magnitude of B. Note ion velocity related to E/B.
Returns
-------
dict
array-like of scalars for translating ion drifts. Keys are,
'north_zonal_drifts_scalar', 'north_mer_drifts_scalar', and similarly
for southern locations. 'equator_mer_drifts_scalar' and
'equator_zonal_drifts_scalar' cover the mappings to the equator.
]
if compare[name[step_size] is constant[None]] begin[:]
variable[step_size] assign[=] constant[100.0]
if compare[name[max_steps] is constant[None]] begin[:]
variable[max_steps] assign[=] constant[1000]
variable[steps] assign[=] call[name[np].arange, parameter[name[max_steps]]]
<ast.Tuple object at 0x7da20c7945b0> assign[=] call[name[geodetic_to_ecef], parameter[name[glats], name[glons], name[alts]]]
variable[eq_zon_drifts_scalar] assign[=] list[[]]
variable[eq_mer_drifts_scalar] assign[=] list[[]]
variable[north_mag_scalar] assign[=] list[[]]
variable[south_mag_scalar] assign[=] list[[]]
variable[eq_mag_scalar] assign[=] list[[]]
variable[out] assign[=] dictionary[[], []]
<ast.Tuple object at 0x7da20c794b80> assign[=] call[name[closed_loop_edge_lengths_via_footpoint], parameter[name[glats], name[glons], name[alts], name[dates], constant[north], constant[meridional]]]
<ast.Tuple object at 0x7da20c7943d0> assign[=] call[name[closed_loop_edge_lengths_via_footpoint], parameter[name[glats], name[glons], name[alts], name[dates], constant[north], constant[zonal]]]
<ast.Tuple object at 0x7da20c795ff0> assign[=] call[name[closed_loop_edge_lengths_via_footpoint], parameter[name[glats], name[glons], name[alts], name[dates], constant[south], constant[meridional]]]
<ast.Tuple object at 0x7da20c7949d0> assign[=] call[name[closed_loop_edge_lengths_via_footpoint], parameter[name[glats], name[glons], name[alts], name[dates], constant[south], constant[zonal]]]
variable[eq_zon_drifts_scalar] assign[=] call[name[closed_loop_edge_lengths_via_equator], parameter[name[glats], name[glons], name[alts], name[dates], constant[meridional]]]
variable[eq_mer_drifts_scalar] assign[=] call[name[closed_loop_edge_lengths_via_equator], parameter[name[glats], name[glons], name[alts], name[dates], constant[zonal]]]
variable[north_zon_drifts_scalar] assign[=] binary_operation[name[north_zon_drifts_scalar] / constant[50.0]]
variable[south_zon_drifts_scalar] assign[=] binary_operation[name[south_zon_drifts_scalar] / constant[50.0]]
variable[north_mer_drifts_scalar] assign[=] binary_operation[name[north_mer_drifts_scalar] / constant[50.0]]
variable[south_mer_drifts_scalar] assign[=] binary_operation[name[south_mer_drifts_scalar] / constant[50.0]]
variable[eq_zon_drifts_scalar] assign[=] binary_operation[constant[50.0] / name[eq_zon_drifts_scalar]]
variable[eq_mer_drifts_scalar] assign[=] binary_operation[constant[50.0] / name[eq_mer_drifts_scalar]]
if name[e_field_scaling_only] begin[:]
call[name[out]][constant[north_mer_fields_scalar]] assign[=] name[north_zon_drifts_scalar]
call[name[out]][constant[south_mer_fields_scalar]] assign[=] name[south_zon_drifts_scalar]
call[name[out]][constant[north_zon_fields_scalar]] assign[=] name[north_mer_drifts_scalar]
call[name[out]][constant[south_zon_fields_scalar]] assign[=] name[south_mer_drifts_scalar]
call[name[out]][constant[equator_mer_fields_scalar]] assign[=] name[eq_zon_drifts_scalar]
call[name[out]][constant[equator_zon_fields_scalar]] assign[=] name[eq_mer_drifts_scalar]
return[name[out]]
|
keyword[def] identifier[scalars_for_mapping_ion_drifts] ( identifier[glats] , identifier[glons] , identifier[alts] , identifier[dates] , identifier[step_size] = keyword[None] ,
identifier[max_steps] = keyword[None] , identifier[e_field_scaling_only] = keyword[False] ):
literal[string]
keyword[if] identifier[step_size] keyword[is] keyword[None] :
identifier[step_size] = literal[int]
keyword[if] identifier[max_steps] keyword[is] keyword[None] :
identifier[max_steps] = literal[int]
identifier[steps] = identifier[np] . identifier[arange] ( identifier[max_steps] )
identifier[ecef_xs] , identifier[ecef_ys] , identifier[ecef_zs] = identifier[geodetic_to_ecef] ( identifier[glats] , identifier[glons] , identifier[alts] )
identifier[eq_zon_drifts_scalar] =[]
identifier[eq_mer_drifts_scalar] =[]
identifier[north_mag_scalar] =[]
identifier[south_mag_scalar] =[]
identifier[eq_mag_scalar] =[]
identifier[out] ={}
identifier[north_zon_drifts_scalar] , identifier[mind_plus] , identifier[mind_minus] = identifier[closed_loop_edge_lengths_via_footpoint] ( identifier[glats] ,
identifier[glons] , identifier[alts] , identifier[dates] , literal[string] ,
literal[string] ,
identifier[step_size] = identifier[step_size] ,
identifier[max_steps] = identifier[max_steps] ,
identifier[edge_length] = literal[int] ,
identifier[edge_steps] = literal[int] )
identifier[north_mer_drifts_scalar] , identifier[mind_plus] , identifier[mind_minus] = identifier[closed_loop_edge_lengths_via_footpoint] ( identifier[glats] ,
identifier[glons] , identifier[alts] , identifier[dates] , literal[string] ,
literal[string] ,
identifier[step_size] = identifier[step_size] ,
identifier[max_steps] = identifier[max_steps] ,
identifier[edge_length] = literal[int] ,
identifier[edge_steps] = literal[int] )
identifier[south_zon_drifts_scalar] , identifier[mind_plus] , identifier[mind_minus] = identifier[closed_loop_edge_lengths_via_footpoint] ( identifier[glats] ,
identifier[glons] , identifier[alts] , identifier[dates] , literal[string] ,
literal[string] ,
identifier[step_size] = identifier[step_size] ,
identifier[max_steps] = identifier[max_steps] ,
identifier[edge_length] = literal[int] ,
identifier[edge_steps] = literal[int] )
identifier[south_mer_drifts_scalar] , identifier[mind_plus] , identifier[mind_minus] = identifier[closed_loop_edge_lengths_via_footpoint] ( identifier[glats] ,
identifier[glons] , identifier[alts] , identifier[dates] , literal[string] ,
literal[string] ,
identifier[step_size] = identifier[step_size] ,
identifier[max_steps] = identifier[max_steps] ,
identifier[edge_length] = literal[int] ,
identifier[edge_steps] = literal[int] )
identifier[eq_zon_drifts_scalar] = identifier[closed_loop_edge_lengths_via_equator] ( identifier[glats] , identifier[glons] , identifier[alts] , identifier[dates] ,
literal[string] ,
identifier[edge_length] = literal[int] ,
identifier[edge_steps] = literal[int] )
identifier[eq_mer_drifts_scalar] = identifier[closed_loop_edge_lengths_via_equator] ( identifier[glats] , identifier[glons] , identifier[alts] , identifier[dates] ,
literal[string] ,
identifier[edge_length] = literal[int] ,
identifier[edge_steps] = literal[int] )
identifier[north_zon_drifts_scalar] = identifier[north_zon_drifts_scalar] / literal[int]
identifier[south_zon_drifts_scalar] = identifier[south_zon_drifts_scalar] / literal[int]
identifier[north_mer_drifts_scalar] = identifier[north_mer_drifts_scalar] / literal[int]
identifier[south_mer_drifts_scalar] = identifier[south_mer_drifts_scalar] / literal[int]
identifier[eq_zon_drifts_scalar] = literal[int] / identifier[eq_zon_drifts_scalar]
identifier[eq_mer_drifts_scalar] = literal[int] / identifier[eq_mer_drifts_scalar]
keyword[if] identifier[e_field_scaling_only] :
identifier[out] [ literal[string] ]= identifier[north_zon_drifts_scalar]
identifier[out] [ literal[string] ]= identifier[south_zon_drifts_scalar]
identifier[out] [ literal[string] ]= identifier[north_mer_drifts_scalar]
identifier[out] [ literal[string] ]= identifier[south_mer_drifts_scalar]
identifier[out] [ literal[string] ]= identifier[eq_zon_drifts_scalar]
identifier[out] [ literal[string] ]= identifier[eq_mer_drifts_scalar]
keyword[else] :
keyword[for] identifier[ecef_x] , identifier[ecef_y] , identifier[ecef_z] , identifier[glat] , identifier[glon] , identifier[alt] , identifier[date] keyword[in] identifier[zip] ( identifier[ecef_xs] , identifier[ecef_ys] , identifier[ecef_zs] ,
identifier[glats] , identifier[glons] , identifier[alts] ,
identifier[dates] ):
identifier[yr] , identifier[doy] = identifier[pysat] . identifier[utils] . identifier[getyrdoy] ( identifier[date] )
identifier[double_date] = identifier[float] ( identifier[yr] )+ identifier[float] ( identifier[doy] )/ literal[int]
identifier[apex_x] , identifier[apex_y] , identifier[apex_z] , identifier[apex_lat] , identifier[apex_lon] , identifier[apex_alt] = identifier[apex_location_info] (
[ identifier[glat] ],[ identifier[glon] ],
[ identifier[alt] ],[ identifier[date] ])
identifier[sc_root] = identifier[np] . identifier[array] ([ identifier[ecef_x] , identifier[ecef_y] , identifier[ecef_z] ])
identifier[trace_north] = identifier[field_line_trace] ( identifier[sc_root] , identifier[double_date] , literal[int] , literal[int] ,
identifier[steps] = identifier[steps] ,
identifier[step_size] = identifier[step_size] ,
identifier[max_steps] = identifier[max_steps] )
identifier[trace_south] = identifier[field_line_trace] ( identifier[sc_root] , identifier[double_date] ,- literal[int] , literal[int] ,
identifier[steps] = identifier[steps] ,
identifier[step_size] = identifier[step_size] ,
identifier[max_steps] = identifier[max_steps] )
identifier[north_ftpnt] = identifier[trace_north] [- literal[int] ,:]
identifier[nft_glat] , identifier[nft_glon] , identifier[nft_alt] = identifier[ecef_to_geodetic] (* identifier[north_ftpnt] )
identifier[south_ftpnt] = identifier[trace_south] [- literal[int] ,:]
identifier[sft_glat] , identifier[sft_glon] , identifier[sft_alt] = identifier[ecef_to_geodetic] (* identifier[south_ftpnt] )
identifier[tbn] , identifier[tbe] , identifier[tbd] , identifier[b_sc] = identifier[igrf] . identifier[igrf12syn] ( literal[int] , identifier[double_date] , literal[int] , identifier[alt] ,
identifier[np] . identifier[deg2rad] ( literal[int] - identifier[glat] ),
identifier[np] . identifier[deg2rad] ( identifier[glon] ))
identifier[tbn] , identifier[tbe] , identifier[tbd] , identifier[b_nft] = identifier[igrf] . identifier[igrf12syn] ( literal[int] , identifier[double_date] , literal[int] , identifier[nft_alt] ,
identifier[np] . identifier[deg2rad] ( literal[int] - identifier[nft_glat] ),
identifier[np] . identifier[deg2rad] ( identifier[nft_glon] ))
identifier[north_mag_scalar] . identifier[append] ( identifier[b_sc] / identifier[b_nft] )
identifier[tbn] , identifier[tbe] , identifier[tbd] , identifier[b_eq] = identifier[igrf] . identifier[igrf12syn] ( literal[int] , identifier[double_date] , literal[int] , identifier[apex_alt] ,
identifier[np] . identifier[deg2rad] ( literal[int] - identifier[apex_lat] ),
identifier[np] . identifier[deg2rad] ( identifier[apex_lon] ))
identifier[eq_mag_scalar] . identifier[append] ( identifier[b_sc] / identifier[b_eq] )
identifier[tbn] , identifier[tbe] , identifier[tbd] , identifier[b_sft] = identifier[igrf] . identifier[igrf12syn] ( literal[int] , identifier[double_date] , literal[int] , identifier[sft_alt] ,
identifier[np] . identifier[deg2rad] ( literal[int] - identifier[sft_glat] ),
identifier[np] . identifier[deg2rad] ( identifier[sft_glon] ))
identifier[south_mag_scalar] . identifier[append] ( identifier[b_sc] / identifier[b_sft] )
identifier[north_mag_scalar] = identifier[np] . identifier[array] ( identifier[north_mag_scalar] )
identifier[south_mag_scalar] = identifier[np] . identifier[array] ( identifier[south_mag_scalar] )
identifier[eq_mag_scalar] = identifier[np] . identifier[array] ( identifier[eq_mag_scalar] )
identifier[north_zon_drifts_scalar] = identifier[north_zon_drifts_scalar] * identifier[north_mag_scalar]
identifier[south_zon_drifts_scalar] = identifier[south_zon_drifts_scalar] * identifier[south_mag_scalar]
identifier[north_mer_drifts_scalar] = identifier[north_mer_drifts_scalar] * identifier[north_mag_scalar]
identifier[south_mer_drifts_scalar] = identifier[south_mer_drifts_scalar] * identifier[south_mag_scalar]
identifier[eq_zon_drifts_scalar] = identifier[eq_zon_drifts_scalar] * identifier[eq_mag_scalar]
identifier[eq_mer_drifts_scalar] = identifier[eq_mer_drifts_scalar] * identifier[eq_mag_scalar]
identifier[out] [ literal[string] ]= identifier[north_zon_drifts_scalar]
identifier[out] [ literal[string] ]= identifier[south_zon_drifts_scalar]
identifier[out] [ literal[string] ]= identifier[north_mer_drifts_scalar]
identifier[out] [ literal[string] ]= identifier[south_mer_drifts_scalar]
identifier[out] [ literal[string] ]= identifier[eq_zon_drifts_scalar]
identifier[out] [ literal[string] ]= identifier[eq_mer_drifts_scalar]
keyword[return] identifier[out]
|
def scalars_for_mapping_ion_drifts(glats, glons, alts, dates, step_size=None, max_steps=None, e_field_scaling_only=False):
"""
Calculates scalars for translating ion motions at position
glat, glon, and alt, for date, to the footpoints of the field line
as well as at the magnetic equator.
All inputs are assumed to be 1D arrays.
Note
----
Directions refer to the ion motion direction e.g. the zonal
scalar applies to zonal ion motions (meridional E field assuming ExB ion motion)
Parameters
----------
glats : list-like of floats (degrees)
Geodetic (WGS84) latitude
glons : list-like of floats (degrees)
Geodetic (WGS84) longitude
alts : list-like of floats (km)
Geodetic (WGS84) altitude, height above surface
dates : list-like of datetimes
Date and time for determination of scalars
e_field_scaling_only : boolean (False)
If True, method only calculates the electric field scalar, ignoring
changes in magnitude of B. Note ion velocity related to E/B.
Returns
-------
dict
array-like of scalars for translating ion drifts. Keys are,
'north_zonal_drifts_scalar', 'north_mer_drifts_scalar', and similarly
for southern locations. 'equator_mer_drifts_scalar' and
'equator_zonal_drifts_scalar' cover the mappings to the equator.
"""
if step_size is None:
step_size = 100.0 # depends on [control=['if'], data=['step_size']]
if max_steps is None:
max_steps = 1000 # depends on [control=['if'], data=['max_steps']]
steps = np.arange(max_steps)
# use spacecraft location to get ECEF
(ecef_xs, ecef_ys, ecef_zs) = geodetic_to_ecef(glats, glons, alts)
# prepare output
eq_zon_drifts_scalar = []
eq_mer_drifts_scalar = []
# magnetic field info
north_mag_scalar = []
south_mag_scalar = []
eq_mag_scalar = []
out = {}
# meridional e-field scalar map, can also be
# zonal ion drift scalar map
# print ('Starting Northern')
(north_zon_drifts_scalar, mind_plus, mind_minus) = closed_loop_edge_lengths_via_footpoint(glats, glons, alts, dates, 'north', 'meridional', step_size=step_size, max_steps=max_steps, edge_length=25.0, edge_steps=5)
(north_mer_drifts_scalar, mind_plus, mind_minus) = closed_loop_edge_lengths_via_footpoint(glats, glons, alts, dates, 'north', 'zonal', step_size=step_size, max_steps=max_steps, edge_length=25.0, edge_steps=5)
# print ('Starting Southern')
(south_zon_drifts_scalar, mind_plus, mind_minus) = closed_loop_edge_lengths_via_footpoint(glats, glons, alts, dates, 'south', 'meridional', step_size=step_size, max_steps=max_steps, edge_length=25.0, edge_steps=5)
(south_mer_drifts_scalar, mind_plus, mind_minus) = closed_loop_edge_lengths_via_footpoint(glats, glons, alts, dates, 'south', 'zonal', step_size=step_size, max_steps=max_steps, edge_length=25.0, edge_steps=5) # print ('Starting Equatorial')
# , step_zon_apex2, mind_plus, mind_minus
eq_zon_drifts_scalar = closed_loop_edge_lengths_via_equator(glats, glons, alts, dates, 'meridional', edge_length=25.0, edge_steps=5) # , step_mer_apex2, mind_plus, mind_minus
eq_mer_drifts_scalar = closed_loop_edge_lengths_via_equator(glats, glons, alts, dates, 'zonal', edge_length=25.0, edge_steps=5)
# print ('Done with core')
north_zon_drifts_scalar = north_zon_drifts_scalar / 50.0
south_zon_drifts_scalar = south_zon_drifts_scalar / 50.0
north_mer_drifts_scalar = north_mer_drifts_scalar / 50.0
south_mer_drifts_scalar = south_mer_drifts_scalar / 50.0 # equatorial
eq_zon_drifts_scalar = 50.0 / eq_zon_drifts_scalar
eq_mer_drifts_scalar = 50.0 / eq_mer_drifts_scalar
if e_field_scaling_only:
# prepare output
out['north_mer_fields_scalar'] = north_zon_drifts_scalar
out['south_mer_fields_scalar'] = south_zon_drifts_scalar
out['north_zon_fields_scalar'] = north_mer_drifts_scalar
out['south_zon_fields_scalar'] = south_mer_drifts_scalar
out['equator_mer_fields_scalar'] = eq_zon_drifts_scalar
out['equator_zon_fields_scalar'] = eq_mer_drifts_scalar # depends on [control=['if'], data=[]]
else:
# figure out scaling for drifts based upon change in magnetic field
# strength
for (ecef_x, ecef_y, ecef_z, glat, glon, alt, date) in zip(ecef_xs, ecef_ys, ecef_zs, glats, glons, alts, dates):
(yr, doy) = pysat.utils.getyrdoy(date)
double_date = float(yr) + float(doy) / 366.0
# get location of apex for s/c field line
(apex_x, apex_y, apex_z, apex_lat, apex_lon, apex_alt) = apex_location_info([glat], [glon], [alt], [date])
# trace to northern footpoint
sc_root = np.array([ecef_x, ecef_y, ecef_z])
trace_north = field_line_trace(sc_root, double_date, 1.0, 120.0, steps=steps, step_size=step_size, max_steps=max_steps)
# southern tracing
trace_south = field_line_trace(sc_root, double_date, -1.0, 120.0, steps=steps, step_size=step_size, max_steps=max_steps)
# footpoint location
north_ftpnt = trace_north[-1, :]
(nft_glat, nft_glon, nft_alt) = ecef_to_geodetic(*north_ftpnt)
south_ftpnt = trace_south[-1, :]
(sft_glat, sft_glon, sft_alt) = ecef_to_geodetic(*south_ftpnt)
# scalar for the northern footpoint electric field based on distances
# for drift also need to include the magnetic field, drift = E/B
(tbn, tbe, tbd, b_sc) = igrf.igrf12syn(0, double_date, 1, alt, np.deg2rad(90.0 - glat), np.deg2rad(glon))
# get mag field and scalar for northern footpoint
(tbn, tbe, tbd, b_nft) = igrf.igrf12syn(0, double_date, 1, nft_alt, np.deg2rad(90.0 - nft_glat), np.deg2rad(nft_glon))
north_mag_scalar.append(b_sc / b_nft)
# equatorial values
(tbn, tbe, tbd, b_eq) = igrf.igrf12syn(0, double_date, 1, apex_alt, np.deg2rad(90.0 - apex_lat), np.deg2rad(apex_lon))
eq_mag_scalar.append(b_sc / b_eq)
# scalar for the southern footpoint
(tbn, tbe, tbd, b_sft) = igrf.igrf12syn(0, double_date, 1, sft_alt, np.deg2rad(90.0 - sft_glat), np.deg2rad(sft_glon))
south_mag_scalar.append(b_sc / b_sft) # depends on [control=['for'], data=[]]
# make E-Field scalars to drifts
# lists to arrays
north_mag_scalar = np.array(north_mag_scalar)
south_mag_scalar = np.array(south_mag_scalar)
eq_mag_scalar = np.array(eq_mag_scalar)
# apply to electric field scaling to get ion drift values
north_zon_drifts_scalar = north_zon_drifts_scalar * north_mag_scalar
south_zon_drifts_scalar = south_zon_drifts_scalar * south_mag_scalar
north_mer_drifts_scalar = north_mer_drifts_scalar * north_mag_scalar
south_mer_drifts_scalar = south_mer_drifts_scalar * south_mag_scalar # equatorial
eq_zon_drifts_scalar = eq_zon_drifts_scalar * eq_mag_scalar
eq_mer_drifts_scalar = eq_mer_drifts_scalar * eq_mag_scalar
# output
out['north_zonal_drifts_scalar'] = north_zon_drifts_scalar
out['south_zonal_drifts_scalar'] = south_zon_drifts_scalar
out['north_mer_drifts_scalar'] = north_mer_drifts_scalar
out['south_mer_drifts_scalar'] = south_mer_drifts_scalar
out['equator_zonal_drifts_scalar'] = eq_zon_drifts_scalar
out['equator_mer_drifts_scalar'] = eq_mer_drifts_scalar
return out
|
def reset_hba(self):
"""Remove all records from pg_hba.conf."""
status = self.get_status()
if status == 'not-initialized':
raise ClusterError(
'cannot modify HBA records: cluster is not initialized')
pg_hba = os.path.join(self._data_dir, 'pg_hba.conf')
try:
with open(pg_hba, 'w'):
pass
except IOError as e:
raise ClusterError(
'cannot modify HBA records: {}'.format(e)) from e
|
def function[reset_hba, parameter[self]]:
constant[Remove all records from pg_hba.conf.]
variable[status] assign[=] call[name[self].get_status, parameter[]]
if compare[name[status] equal[==] constant[not-initialized]] begin[:]
<ast.Raise object at 0x7da204962bc0>
variable[pg_hba] assign[=] call[name[os].path.join, parameter[name[self]._data_dir, constant[pg_hba.conf]]]
<ast.Try object at 0x7da1b1954c40>
|
keyword[def] identifier[reset_hba] ( identifier[self] ):
literal[string]
identifier[status] = identifier[self] . identifier[get_status] ()
keyword[if] identifier[status] == literal[string] :
keyword[raise] identifier[ClusterError] (
literal[string] )
identifier[pg_hba] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[_data_dir] , literal[string] )
keyword[try] :
keyword[with] identifier[open] ( identifier[pg_hba] , literal[string] ):
keyword[pass]
keyword[except] identifier[IOError] keyword[as] identifier[e] :
keyword[raise] identifier[ClusterError] (
literal[string] . identifier[format] ( identifier[e] )) keyword[from] identifier[e]
|
def reset_hba(self):
"""Remove all records from pg_hba.conf."""
status = self.get_status()
if status == 'not-initialized':
raise ClusterError('cannot modify HBA records: cluster is not initialized') # depends on [control=['if'], data=[]]
pg_hba = os.path.join(self._data_dir, 'pg_hba.conf')
try:
with open(pg_hba, 'w'):
pass # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
except IOError as e:
raise ClusterError('cannot modify HBA records: {}'.format(e)) from e # depends on [control=['except'], data=['e']]
|
def coordinate(self):
"""Returns the maven coordinate of this jar.
:rtype: :class:`pants.java.jar.M2Coordinate`
"""
return M2Coordinate(org=self.org, name=self.name, rev=self.rev, classifier=self.classifier,
ext=self.ext)
|
def function[coordinate, parameter[self]]:
constant[Returns the maven coordinate of this jar.
:rtype: :class:`pants.java.jar.M2Coordinate`
]
return[call[name[M2Coordinate], parameter[]]]
|
keyword[def] identifier[coordinate] ( identifier[self] ):
literal[string]
keyword[return] identifier[M2Coordinate] ( identifier[org] = identifier[self] . identifier[org] , identifier[name] = identifier[self] . identifier[name] , identifier[rev] = identifier[self] . identifier[rev] , identifier[classifier] = identifier[self] . identifier[classifier] ,
identifier[ext] = identifier[self] . identifier[ext] )
|
def coordinate(self):
"""Returns the maven coordinate of this jar.
:rtype: :class:`pants.java.jar.M2Coordinate`
"""
return M2Coordinate(org=self.org, name=self.name, rev=self.rev, classifier=self.classifier, ext=self.ext)
|
def string_literal(content):
"""
Choose a string literal that can wrap our string.
If your string contains a ``\'`` the result will be wrapped in ``\"``.
If your string contains a ``\"`` the result will be wrapped in ``\'``.
Cannot currently handle strings which contain both ``\"`` and ``\'``.
"""
if '"' in content and "'" in content:
# there is no way to escape string literal characters in XPath
raise ValueError("Cannot represent this string in XPath")
if '"' in content: # if it contains " wrap it in '
content = "'%s'" % content
else: # wrap it in "
content = '"%s"' % content
return content
|
def function[string_literal, parameter[content]]:
constant[
Choose a string literal that can wrap our string.
If your string contains a ``'`` the result will be wrapped in ``"``.
If your string contains a ``"`` the result will be wrapped in ``'``.
Cannot currently handle strings which contain both ``"`` and ``'``.
]
if <ast.BoolOp object at 0x7da18bc71cf0> begin[:]
<ast.Raise object at 0x7da237eefca0>
if compare[constant["] in name[content]] begin[:]
variable[content] assign[=] binary_operation[constant['%s'] <ast.Mod object at 0x7da2590d6920> name[content]]
return[name[content]]
|
keyword[def] identifier[string_literal] ( identifier[content] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[content] keyword[and] literal[string] keyword[in] identifier[content] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[content] :
identifier[content] = literal[string] % identifier[content]
keyword[else] :
identifier[content] = literal[string] % identifier[content]
keyword[return] identifier[content]
|
def string_literal(content):
"""
Choose a string literal that can wrap our string.
If your string contains a ``'`` the result will be wrapped in ``"``.
If your string contains a ``"`` the result will be wrapped in ``'``.
Cannot currently handle strings which contain both ``"`` and ``'``.
"""
if '"' in content and "'" in content:
# there is no way to escape string literal characters in XPath
raise ValueError('Cannot represent this string in XPath') # depends on [control=['if'], data=[]]
if '"' in content: # if it contains " wrap it in '
content = "'%s'" % content # depends on [control=['if'], data=['content']]
else: # wrap it in "
content = '"%s"' % content
return content
|
def humanized_model_to_dict(instance, readonly_fields, fields=None, exclude=None):
"""
Returns a dict containing the humanized data in ``instance`` suitable for passing as
a Form's ``initial`` keyword argument.
``fields`` is an optional list of field names. If provided, only the named
fields will be included in the returned dict.
``exclude`` is an optional list of field names. If provided, the named
fields will be excluded from the returned dict, even if they are listed in
the ``fields`` argument.
"""
opts = instance._meta
data = {}
for f in itertools.chain(opts.concrete_fields, opts.private_fields, opts.many_to_many):
if not getattr(f, 'editable', False):
continue
if fields and f.name not in fields:
continue
if f.name not in readonly_fields:
continue
if exclude and f.name in exclude:
continue
if f.humanized:
data[f.name] = f.humanized(getattr(instance, f.name), instance)
return data
|
def function[humanized_model_to_dict, parameter[instance, readonly_fields, fields, exclude]]:
constant[
Returns a dict containing the humanized data in ``instance`` suitable for passing as
a Form's ``initial`` keyword argument.
``fields`` is an optional list of field names. If provided, only the named
fields will be included in the returned dict.
``exclude`` is an optional list of field names. If provided, the named
fields will be excluded from the returned dict, even if they are listed in
the ``fields`` argument.
]
variable[opts] assign[=] name[instance]._meta
variable[data] assign[=] dictionary[[], []]
for taget[name[f]] in starred[call[name[itertools].chain, parameter[name[opts].concrete_fields, name[opts].private_fields, name[opts].many_to_many]]] begin[:]
if <ast.UnaryOp object at 0x7da2046211e0> begin[:]
continue
if <ast.BoolOp object at 0x7da2046236d0> begin[:]
continue
if compare[name[f].name <ast.NotIn object at 0x7da2590d7190> name[readonly_fields]] begin[:]
continue
if <ast.BoolOp object at 0x7da204623880> begin[:]
continue
if name[f].humanized begin[:]
call[name[data]][name[f].name] assign[=] call[name[f].humanized, parameter[call[name[getattr], parameter[name[instance], name[f].name]], name[instance]]]
return[name[data]]
|
keyword[def] identifier[humanized_model_to_dict] ( identifier[instance] , identifier[readonly_fields] , identifier[fields] = keyword[None] , identifier[exclude] = keyword[None] ):
literal[string]
identifier[opts] = identifier[instance] . identifier[_meta]
identifier[data] ={}
keyword[for] identifier[f] keyword[in] identifier[itertools] . identifier[chain] ( identifier[opts] . identifier[concrete_fields] , identifier[opts] . identifier[private_fields] , identifier[opts] . identifier[many_to_many] ):
keyword[if] keyword[not] identifier[getattr] ( identifier[f] , literal[string] , keyword[False] ):
keyword[continue]
keyword[if] identifier[fields] keyword[and] identifier[f] . identifier[name] keyword[not] keyword[in] identifier[fields] :
keyword[continue]
keyword[if] identifier[f] . identifier[name] keyword[not] keyword[in] identifier[readonly_fields] :
keyword[continue]
keyword[if] identifier[exclude] keyword[and] identifier[f] . identifier[name] keyword[in] identifier[exclude] :
keyword[continue]
keyword[if] identifier[f] . identifier[humanized] :
identifier[data] [ identifier[f] . identifier[name] ]= identifier[f] . identifier[humanized] ( identifier[getattr] ( identifier[instance] , identifier[f] . identifier[name] ), identifier[instance] )
keyword[return] identifier[data]
|
def humanized_model_to_dict(instance, readonly_fields, fields=None, exclude=None):
"""
Returns a dict containing the humanized data in ``instance`` suitable for passing as
a Form's ``initial`` keyword argument.
``fields`` is an optional list of field names. If provided, only the named
fields will be included in the returned dict.
``exclude`` is an optional list of field names. If provided, the named
fields will be excluded from the returned dict, even if they are listed in
the ``fields`` argument.
"""
opts = instance._meta
data = {}
for f in itertools.chain(opts.concrete_fields, opts.private_fields, opts.many_to_many):
if not getattr(f, 'editable', False):
continue # depends on [control=['if'], data=[]]
if fields and f.name not in fields:
continue # depends on [control=['if'], data=[]]
if f.name not in readonly_fields:
continue # depends on [control=['if'], data=[]]
if exclude and f.name in exclude:
continue # depends on [control=['if'], data=[]]
if f.humanized:
data[f.name] = f.humanized(getattr(instance, f.name), instance) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
return data
|
def read(self, size=None):
"""Read at most size bytes from this buffer.
Bytes read from this buffer are consumed and are permanently removed.
Args:
size: If provided, read no more than size bytes from the buffer.
Otherwise, this reads the entire buffer.
Returns:
The bytes read from this buffer.
"""
if size is None:
size = self.__size
ret_list = []
while size > 0 and self.__buf:
data = self.__buf.popleft()
size -= len(data)
ret_list.append(data)
if size < 0:
ret_list[-1], remainder = ret_list[-1][:size], ret_list[-1][size:]
self.__buf.appendleft(remainder)
ret = b''.join(ret_list)
self.__size -= len(ret)
return ret
|
def function[read, parameter[self, size]]:
constant[Read at most size bytes from this buffer.
Bytes read from this buffer are consumed and are permanently removed.
Args:
size: If provided, read no more than size bytes from the buffer.
Otherwise, this reads the entire buffer.
Returns:
The bytes read from this buffer.
]
if compare[name[size] is constant[None]] begin[:]
variable[size] assign[=] name[self].__size
variable[ret_list] assign[=] list[[]]
while <ast.BoolOp object at 0x7da1b07b9f90> begin[:]
variable[data] assign[=] call[name[self].__buf.popleft, parameter[]]
<ast.AugAssign object at 0x7da1b07f9060>
call[name[ret_list].append, parameter[name[data]]]
if compare[name[size] less[<] constant[0]] begin[:]
<ast.Tuple object at 0x7da1b07f87f0> assign[=] tuple[[<ast.Subscript object at 0x7da1b07fbeb0>, <ast.Subscript object at 0x7da1b07f66b0>]]
call[name[self].__buf.appendleft, parameter[name[remainder]]]
variable[ret] assign[=] call[constant[b''].join, parameter[name[ret_list]]]
<ast.AugAssign object at 0x7da1b07f7fa0>
return[name[ret]]
|
keyword[def] identifier[read] ( identifier[self] , identifier[size] = keyword[None] ):
literal[string]
keyword[if] identifier[size] keyword[is] keyword[None] :
identifier[size] = identifier[self] . identifier[__size]
identifier[ret_list] =[]
keyword[while] identifier[size] > literal[int] keyword[and] identifier[self] . identifier[__buf] :
identifier[data] = identifier[self] . identifier[__buf] . identifier[popleft] ()
identifier[size] -= identifier[len] ( identifier[data] )
identifier[ret_list] . identifier[append] ( identifier[data] )
keyword[if] identifier[size] < literal[int] :
identifier[ret_list] [- literal[int] ], identifier[remainder] = identifier[ret_list] [- literal[int] ][: identifier[size] ], identifier[ret_list] [- literal[int] ][ identifier[size] :]
identifier[self] . identifier[__buf] . identifier[appendleft] ( identifier[remainder] )
identifier[ret] = literal[string] . identifier[join] ( identifier[ret_list] )
identifier[self] . identifier[__size] -= identifier[len] ( identifier[ret] )
keyword[return] identifier[ret]
|
def read(self, size=None):
"""Read at most size bytes from this buffer.
Bytes read from this buffer are consumed and are permanently removed.
Args:
size: If provided, read no more than size bytes from the buffer.
Otherwise, this reads the entire buffer.
Returns:
The bytes read from this buffer.
"""
if size is None:
size = self.__size # depends on [control=['if'], data=['size']]
ret_list = []
while size > 0 and self.__buf:
data = self.__buf.popleft()
size -= len(data)
ret_list.append(data) # depends on [control=['while'], data=[]]
if size < 0:
(ret_list[-1], remainder) = (ret_list[-1][:size], ret_list[-1][size:])
self.__buf.appendleft(remainder) # depends on [control=['if'], data=['size']]
ret = b''.join(ret_list)
self.__size -= len(ret)
return ret
|
def multiscale_permutation_entropy(time_series, m, delay, scale):
"""Calculate the Multiscale Permutation Entropy
Args:
time_series: Time series for analysis
m: Order of permutation entropy
delay: Time delay
scale: Scale factor
Returns:
Vector containing Multiscale Permutation Entropy
Reference:
[1] Francesco Carlo Morabito et al. Multivariate Multi-Scale Permutation Entropy for
Complexity Analysis of Alzheimer’s Disease EEG. www.mdpi.com/1099-4300/14/7/1186
[2] http://www.mathworks.com/matlabcentral/fileexchange/37288-multiscale-permutation-entropy-mpe/content/MPerm.m
"""
mspe = []
for i in range(scale):
coarse_time_series = util_granulate_time_series(time_series, i + 1)
pe = permutation_entropy(coarse_time_series, order=m, delay=delay)
mspe.append(pe)
return mspe
|
def function[multiscale_permutation_entropy, parameter[time_series, m, delay, scale]]:
constant[Calculate the Multiscale Permutation Entropy
Args:
time_series: Time series for analysis
m: Order of permutation entropy
delay: Time delay
scale: Scale factor
Returns:
Vector containing Multiscale Permutation Entropy
Reference:
[1] Francesco Carlo Morabito et al. Multivariate Multi-Scale Permutation Entropy for
Complexity Analysis of Alzheimer’s Disease EEG. www.mdpi.com/1099-4300/14/7/1186
[2] http://www.mathworks.com/matlabcentral/fileexchange/37288-multiscale-permutation-entropy-mpe/content/MPerm.m
]
variable[mspe] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[name[scale]]]] begin[:]
variable[coarse_time_series] assign[=] call[name[util_granulate_time_series], parameter[name[time_series], binary_operation[name[i] + constant[1]]]]
variable[pe] assign[=] call[name[permutation_entropy], parameter[name[coarse_time_series]]]
call[name[mspe].append, parameter[name[pe]]]
return[name[mspe]]
|
keyword[def] identifier[multiscale_permutation_entropy] ( identifier[time_series] , identifier[m] , identifier[delay] , identifier[scale] ):
literal[string]
identifier[mspe] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[scale] ):
identifier[coarse_time_series] = identifier[util_granulate_time_series] ( identifier[time_series] , identifier[i] + literal[int] )
identifier[pe] = identifier[permutation_entropy] ( identifier[coarse_time_series] , identifier[order] = identifier[m] , identifier[delay] = identifier[delay] )
identifier[mspe] . identifier[append] ( identifier[pe] )
keyword[return] identifier[mspe]
|
def multiscale_permutation_entropy(time_series, m, delay, scale):
"""Calculate the Multiscale Permutation Entropy
Args:
time_series: Time series for analysis
m: Order of permutation entropy
delay: Time delay
scale: Scale factor
Returns:
Vector containing Multiscale Permutation Entropy
Reference:
[1] Francesco Carlo Morabito et al. Multivariate Multi-Scale Permutation Entropy for
Complexity Analysis of Alzheimer’s Disease EEG. www.mdpi.com/1099-4300/14/7/1186
[2] http://www.mathworks.com/matlabcentral/fileexchange/37288-multiscale-permutation-entropy-mpe/content/MPerm.m
"""
mspe = []
for i in range(scale):
coarse_time_series = util_granulate_time_series(time_series, i + 1)
pe = permutation_entropy(coarse_time_series, order=m, delay=delay)
mspe.append(pe) # depends on [control=['for'], data=['i']]
return mspe
|
def splitext(path):
"""splitext for paths with directories that may contain dots.
From https://stackoverflow.com/questions/5930036/separating-file-extensions-using-python-os-path-module"""
li = []
path_without_extensions = os.path.join(os.path.dirname(path),
os.path.basename(path).split(os.extsep)[0])
extensions = os.path.basename(path).split(os.extsep)[1:]
li.append(path_without_extensions)
# li.append(extensions) if you want extensions in another list inside the list that is returned.
li.extend(extensions)
return li
|
def function[splitext, parameter[path]]:
constant[splitext for paths with directories that may contain dots.
From https://stackoverflow.com/questions/5930036/separating-file-extensions-using-python-os-path-module]
variable[li] assign[=] list[[]]
variable[path_without_extensions] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[path]]], call[call[call[name[os].path.basename, parameter[name[path]]].split, parameter[name[os].extsep]]][constant[0]]]]
variable[extensions] assign[=] call[call[call[name[os].path.basename, parameter[name[path]]].split, parameter[name[os].extsep]]][<ast.Slice object at 0x7da1b1005000>]
call[name[li].append, parameter[name[path_without_extensions]]]
call[name[li].extend, parameter[name[extensions]]]
return[name[li]]
|
keyword[def] identifier[splitext] ( identifier[path] ):
literal[string]
identifier[li] =[]
identifier[path_without_extensions] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[path] ),
identifier[os] . identifier[path] . identifier[basename] ( identifier[path] ). identifier[split] ( identifier[os] . identifier[extsep] )[ literal[int] ])
identifier[extensions] = identifier[os] . identifier[path] . identifier[basename] ( identifier[path] ). identifier[split] ( identifier[os] . identifier[extsep] )[ literal[int] :]
identifier[li] . identifier[append] ( identifier[path_without_extensions] )
identifier[li] . identifier[extend] ( identifier[extensions] )
keyword[return] identifier[li]
|
def splitext(path):
"""splitext for paths with directories that may contain dots.
From https://stackoverflow.com/questions/5930036/separating-file-extensions-using-python-os-path-module"""
li = []
path_without_extensions = os.path.join(os.path.dirname(path), os.path.basename(path).split(os.extsep)[0])
extensions = os.path.basename(path).split(os.extsep)[1:]
li.append(path_without_extensions)
# li.append(extensions) if you want extensions in another list inside the list that is returned.
li.extend(extensions)
return li
|
def get_withdrawals(self, currency=None, status=None, start=None, end=None, page=None, limit=None):
"""Get deposit records for a currency
https://docs.kucoin.com/#get-withdrawals-list
:param currency: Name of currency (optional)
:type currency: string
:param status: optional - Status of deposit (PROCESSING, SUCCESS, FAILURE)
:type status: string
:param start: (optional) Start time as unix timestamp
:type start: string
:param end: (optional) End time as unix timestamp
:type end: string
:param page: (optional) Page to fetch
:type page: int
:param limit: (optional) Number of transactions
:type limit: int
.. code:: python
withdrawals = client.get_withdrawals('NEO')
:returns: ApiResponse
.. code:: python
{
"currentPage": 1,
"pageSize": 10,
"totalNum": 1,
"totalPage": 1,
"items": [
{
"id": "5c2dc64e03aa675aa263f1ac",
"address": "0x5bedb060b8eb8d823e2414d82acce78d38be7fe9",
"memo": "",
"currency": "ETH",
"amount": 1.0000000,
"fee": 0.0100000,
"walletTxId": "3e2414d82acce78d38be7fe9",
"isInner": false,
"status": "FAILURE",
"createdAt": 1546503758000,
"updatedAt": 1546504603000
}
]
}
:raises: KucoinResponseException, KucoinAPIException
"""
data = {}
if currency:
data['currency'] = currency
if status:
data['status'] = status
if start:
data['startAt'] = start
if end:
data['endAt'] = end
if limit:
data['pageSize'] = limit
if page:
data['page'] = page
return self._get('withdrawals', True, data=data)
|
def function[get_withdrawals, parameter[self, currency, status, start, end, page, limit]]:
constant[Get deposit records for a currency
https://docs.kucoin.com/#get-withdrawals-list
:param currency: Name of currency (optional)
:type currency: string
:param status: optional - Status of deposit (PROCESSING, SUCCESS, FAILURE)
:type status: string
:param start: (optional) Start time as unix timestamp
:type start: string
:param end: (optional) End time as unix timestamp
:type end: string
:param page: (optional) Page to fetch
:type page: int
:param limit: (optional) Number of transactions
:type limit: int
.. code:: python
withdrawals = client.get_withdrawals('NEO')
:returns: ApiResponse
.. code:: python
{
"currentPage": 1,
"pageSize": 10,
"totalNum": 1,
"totalPage": 1,
"items": [
{
"id": "5c2dc64e03aa675aa263f1ac",
"address": "0x5bedb060b8eb8d823e2414d82acce78d38be7fe9",
"memo": "",
"currency": "ETH",
"amount": 1.0000000,
"fee": 0.0100000,
"walletTxId": "3e2414d82acce78d38be7fe9",
"isInner": false,
"status": "FAILURE",
"createdAt": 1546503758000,
"updatedAt": 1546504603000
}
]
}
:raises: KucoinResponseException, KucoinAPIException
]
variable[data] assign[=] dictionary[[], []]
if name[currency] begin[:]
call[name[data]][constant[currency]] assign[=] name[currency]
if name[status] begin[:]
call[name[data]][constant[status]] assign[=] name[status]
if name[start] begin[:]
call[name[data]][constant[startAt]] assign[=] name[start]
if name[end] begin[:]
call[name[data]][constant[endAt]] assign[=] name[end]
if name[limit] begin[:]
call[name[data]][constant[pageSize]] assign[=] name[limit]
if name[page] begin[:]
call[name[data]][constant[page]] assign[=] name[page]
return[call[name[self]._get, parameter[constant[withdrawals], constant[True]]]]
|
keyword[def] identifier[get_withdrawals] ( identifier[self] , identifier[currency] = keyword[None] , identifier[status] = keyword[None] , identifier[start] = keyword[None] , identifier[end] = keyword[None] , identifier[page] = keyword[None] , identifier[limit] = keyword[None] ):
literal[string]
identifier[data] ={}
keyword[if] identifier[currency] :
identifier[data] [ literal[string] ]= identifier[currency]
keyword[if] identifier[status] :
identifier[data] [ literal[string] ]= identifier[status]
keyword[if] identifier[start] :
identifier[data] [ literal[string] ]= identifier[start]
keyword[if] identifier[end] :
identifier[data] [ literal[string] ]= identifier[end]
keyword[if] identifier[limit] :
identifier[data] [ literal[string] ]= identifier[limit]
keyword[if] identifier[page] :
identifier[data] [ literal[string] ]= identifier[page]
keyword[return] identifier[self] . identifier[_get] ( literal[string] , keyword[True] , identifier[data] = identifier[data] )
|
def get_withdrawals(self, currency=None, status=None, start=None, end=None, page=None, limit=None):
"""Get deposit records for a currency
https://docs.kucoin.com/#get-withdrawals-list
:param currency: Name of currency (optional)
:type currency: string
:param status: optional - Status of deposit (PROCESSING, SUCCESS, FAILURE)
:type status: string
:param start: (optional) Start time as unix timestamp
:type start: string
:param end: (optional) End time as unix timestamp
:type end: string
:param page: (optional) Page to fetch
:type page: int
:param limit: (optional) Number of transactions
:type limit: int
.. code:: python
withdrawals = client.get_withdrawals('NEO')
:returns: ApiResponse
.. code:: python
{
"currentPage": 1,
"pageSize": 10,
"totalNum": 1,
"totalPage": 1,
"items": [
{
"id": "5c2dc64e03aa675aa263f1ac",
"address": "0x5bedb060b8eb8d823e2414d82acce78d38be7fe9",
"memo": "",
"currency": "ETH",
"amount": 1.0000000,
"fee": 0.0100000,
"walletTxId": "3e2414d82acce78d38be7fe9",
"isInner": false,
"status": "FAILURE",
"createdAt": 1546503758000,
"updatedAt": 1546504603000
}
]
}
:raises: KucoinResponseException, KucoinAPIException
"""
data = {}
if currency:
data['currency'] = currency # depends on [control=['if'], data=[]]
if status:
data['status'] = status # depends on [control=['if'], data=[]]
if start:
data['startAt'] = start # depends on [control=['if'], data=[]]
if end:
data['endAt'] = end # depends on [control=['if'], data=[]]
if limit:
data['pageSize'] = limit # depends on [control=['if'], data=[]]
if page:
data['page'] = page # depends on [control=['if'], data=[]]
return self._get('withdrawals', True, data=data)
|
def search(self, **kwargs):
"""
:param entity_id: location id
:param entity_type: location type (city, subzone, zone, lanmark, metro , group)
:param q: search keyword
:param start: fetch results after offset
:param count: max number of results to display
:param lat: latitude
:param lon: longitude
:param radius: radius around (lat,lon); to define search area, defined in meters(M)
:param cuisines: list of cuisine id's separated by comma
:param establishment_type: estblishment id obtained from establishments call
:param collection_id: collection id obtained from collections call
:param category: category ids obtained from categories call
:param sort: sort restaurants by (cost, rating, real_distance)
:param order: used with 'sort' parameter to define ascending / descending
:return: json response
The location input can be specified using Zomato location ID or coordinates. Cuisine / Establishment /
Collection IDs can be obtained from respective api calls.
Partner Access is required to access photos and reviews.
Examples:
- To search for 'Italian' restaurants in 'Manhattan, New York City',
set cuisines = 55, entity_id = 94741 and entity_type = zone
- To search for 'cafes' in 'Manhattan, New York City',
set establishment_type = 1, entity_type = zone and entity_id = 94741
- Get list of all restaurants in 'Trending this Week' collection in 'New York City' by using
entity_id = 280, entity_type = city and collection_id = 1
"""
params = {}
available_params = [
"entity_id", "entity_type", "q", "start",
"count", "lat", "lon", "radius", "cuisines",
"establishment_type", "collection_id",
"category", "sort", "order"]
for key in available_params:
if key in kwargs:
params[key] = kwargs[key]
results = self.api.get("/search", params)
return results
|
def function[search, parameter[self]]:
constant[
:param entity_id: location id
:param entity_type: location type (city, subzone, zone, lanmark, metro , group)
:param q: search keyword
:param start: fetch results after offset
:param count: max number of results to display
:param lat: latitude
:param lon: longitude
:param radius: radius around (lat,lon); to define search area, defined in meters(M)
:param cuisines: list of cuisine id's separated by comma
:param establishment_type: estblishment id obtained from establishments call
:param collection_id: collection id obtained from collections call
:param category: category ids obtained from categories call
:param sort: sort restaurants by (cost, rating, real_distance)
:param order: used with 'sort' parameter to define ascending / descending
:return: json response
The location input can be specified using Zomato location ID or coordinates. Cuisine / Establishment /
Collection IDs can be obtained from respective api calls.
Partner Access is required to access photos and reviews.
Examples:
- To search for 'Italian' restaurants in 'Manhattan, New York City',
set cuisines = 55, entity_id = 94741 and entity_type = zone
- To search for 'cafes' in 'Manhattan, New York City',
set establishment_type = 1, entity_type = zone and entity_id = 94741
- Get list of all restaurants in 'Trending this Week' collection in 'New York City' by using
entity_id = 280, entity_type = city and collection_id = 1
]
variable[params] assign[=] dictionary[[], []]
variable[available_params] assign[=] list[[<ast.Constant object at 0x7da18f721c90>, <ast.Constant object at 0x7da18f721480>, <ast.Constant object at 0x7da18f723ee0>, <ast.Constant object at 0x7da18f720ac0>, <ast.Constant object at 0x7da18f722860>, <ast.Constant object at 0x7da18f723700>, <ast.Constant object at 0x7da18f722560>, <ast.Constant object at 0x7da18f7212d0>, <ast.Constant object at 0x7da18f723340>, <ast.Constant object at 0x7da18f7210c0>, <ast.Constant object at 0x7da18f721c60>, <ast.Constant object at 0x7da18f721ea0>, <ast.Constant object at 0x7da18f721750>, <ast.Constant object at 0x7da18f7215d0>]]
for taget[name[key]] in starred[name[available_params]] begin[:]
if compare[name[key] in name[kwargs]] begin[:]
call[name[params]][name[key]] assign[=] call[name[kwargs]][name[key]]
variable[results] assign[=] call[name[self].api.get, parameter[constant[/search], name[params]]]
return[name[results]]
|
keyword[def] identifier[search] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[params] ={}
identifier[available_params] =[
literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ]
keyword[for] identifier[key] keyword[in] identifier[available_params] :
keyword[if] identifier[key] keyword[in] identifier[kwargs] :
identifier[params] [ identifier[key] ]= identifier[kwargs] [ identifier[key] ]
identifier[results] = identifier[self] . identifier[api] . identifier[get] ( literal[string] , identifier[params] )
keyword[return] identifier[results]
|
def search(self, **kwargs):
"""
:param entity_id: location id
:param entity_type: location type (city, subzone, zone, lanmark, metro , group)
:param q: search keyword
:param start: fetch results after offset
:param count: max number of results to display
:param lat: latitude
:param lon: longitude
:param radius: radius around (lat,lon); to define search area, defined in meters(M)
:param cuisines: list of cuisine id's separated by comma
:param establishment_type: estblishment id obtained from establishments call
:param collection_id: collection id obtained from collections call
:param category: category ids obtained from categories call
:param sort: sort restaurants by (cost, rating, real_distance)
:param order: used with 'sort' parameter to define ascending / descending
:return: json response
The location input can be specified using Zomato location ID or coordinates. Cuisine / Establishment /
Collection IDs can be obtained from respective api calls.
Partner Access is required to access photos and reviews.
Examples:
- To search for 'Italian' restaurants in 'Manhattan, New York City',
set cuisines = 55, entity_id = 94741 and entity_type = zone
- To search for 'cafes' in 'Manhattan, New York City',
set establishment_type = 1, entity_type = zone and entity_id = 94741
- Get list of all restaurants in 'Trending this Week' collection in 'New York City' by using
entity_id = 280, entity_type = city and collection_id = 1
"""
params = {}
available_params = ['entity_id', 'entity_type', 'q', 'start', 'count', 'lat', 'lon', 'radius', 'cuisines', 'establishment_type', 'collection_id', 'category', 'sort', 'order']
for key in available_params:
if key in kwargs:
params[key] = kwargs[key] # depends on [control=['if'], data=['key', 'kwargs']] # depends on [control=['for'], data=['key']]
results = self.api.get('/search', params)
return results
|
def run_run(self, run, conf=None, run_conf=None, use_thread=False, catch_exception=True):
'''Runs a run in another thread. Non-blocking.
Parameters
----------
run : class, object
Run class or object.
run_conf : str, dict, file
Specific configuration for the run.
use_thread : bool
If True, run run in thread and returns blocking function.
Returns
-------
If use_thread is True, returns function, which blocks until thread terminates, and which itself returns run status.
If use_thread is False, returns run status.
'''
if isinstance(conf, basestring) and os.path.isfile(conf):
logging.info('Updating configuration from file %s', os.path.abspath(conf))
elif conf is not None:
logging.info('Updating configuration')
conf = self.open_conf(conf)
self._conf.update(conf)
if isclass(run):
# instantiate the class
run = run(conf=self._conf)
local_run_conf = {}
# general parameters from conf
if 'run_conf' in self._conf:
logging.info('Updating run configuration using run_conf key from configuration')
local_run_conf.update(self._conf['run_conf'])
# check for class name, scan specific parameters from conf
if run.__class__.__name__ in self._conf:
logging.info('Updating run configuration using %s key from configuration' % (run.__class__.__name__,))
local_run_conf.update(self._conf[run.__class__.__name__])
if isinstance(run_conf, basestring) and os.path.isfile(run_conf):
logging.info('Updating run configuration from file %s', os.path.abspath(run_conf))
elif run_conf is not None:
logging.info('Updating run configuration')
run_conf = self.open_conf(run_conf)
# check for class name, scan specific parameters from conf
if run.__class__.__name__ in run_conf:
run_conf = run_conf[run.__class__.__name__]
# run_conf parameter has highest priority, updated last
local_run_conf.update(run_conf)
if use_thread:
self.current_run = run
@thunkify(thread_name='RunThread', daemon=True, default_func=self.current_run.get_run_status)
def run_run_in_thread():
return run.run(run_conf=local_run_conf)
signal.signal(signal.SIGINT, self._signal_handler)
logging.info('Press Ctrl-C to stop run')
return run_run_in_thread()
else:
self.current_run = run
status = run.run(run_conf=local_run_conf)
if not catch_exception and status != run_status.finished:
raise RuntimeError('Exception occurred. Please read the log.')
return status
|
def function[run_run, parameter[self, run, conf, run_conf, use_thread, catch_exception]]:
constant[Runs a run in another thread. Non-blocking.
Parameters
----------
run : class, object
Run class or object.
run_conf : str, dict, file
Specific configuration for the run.
use_thread : bool
If True, run run in thread and returns blocking function.
Returns
-------
If use_thread is True, returns function, which blocks until thread terminates, and which itself returns run status.
If use_thread is False, returns run status.
]
if <ast.BoolOp object at 0x7da20c76f430> begin[:]
call[name[logging].info, parameter[constant[Updating configuration from file %s], call[name[os].path.abspath, parameter[name[conf]]]]]
variable[conf] assign[=] call[name[self].open_conf, parameter[name[conf]]]
call[name[self]._conf.update, parameter[name[conf]]]
if call[name[isclass], parameter[name[run]]] begin[:]
variable[run] assign[=] call[name[run], parameter[]]
variable[local_run_conf] assign[=] dictionary[[], []]
if compare[constant[run_conf] in name[self]._conf] begin[:]
call[name[logging].info, parameter[constant[Updating run configuration using run_conf key from configuration]]]
call[name[local_run_conf].update, parameter[call[name[self]._conf][constant[run_conf]]]]
if compare[name[run].__class__.__name__ in name[self]._conf] begin[:]
call[name[logging].info, parameter[binary_operation[constant[Updating run configuration using %s key from configuration] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b10d5d80>]]]]]
call[name[local_run_conf].update, parameter[call[name[self]._conf][name[run].__class__.__name__]]]
if <ast.BoolOp object at 0x7da1b10e7c70> begin[:]
call[name[logging].info, parameter[constant[Updating run configuration from file %s], call[name[os].path.abspath, parameter[name[run_conf]]]]]
variable[run_conf] assign[=] call[name[self].open_conf, parameter[name[run_conf]]]
if compare[name[run].__class__.__name__ in name[run_conf]] begin[:]
variable[run_conf] assign[=] call[name[run_conf]][name[run].__class__.__name__]
call[name[local_run_conf].update, parameter[name[run_conf]]]
if name[use_thread] begin[:]
name[self].current_run assign[=] name[run]
def function[run_run_in_thread, parameter[]]:
return[call[name[run].run, parameter[]]]
call[name[signal].signal, parameter[name[signal].SIGINT, name[self]._signal_handler]]
call[name[logging].info, parameter[constant[Press Ctrl-C to stop run]]]
return[call[name[run_run_in_thread], parameter[]]]
|
keyword[def] identifier[run_run] ( identifier[self] , identifier[run] , identifier[conf] = keyword[None] , identifier[run_conf] = keyword[None] , identifier[use_thread] = keyword[False] , identifier[catch_exception] = keyword[True] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[conf] , identifier[basestring] ) keyword[and] identifier[os] . identifier[path] . identifier[isfile] ( identifier[conf] ):
identifier[logging] . identifier[info] ( literal[string] , identifier[os] . identifier[path] . identifier[abspath] ( identifier[conf] ))
keyword[elif] identifier[conf] keyword[is] keyword[not] keyword[None] :
identifier[logging] . identifier[info] ( literal[string] )
identifier[conf] = identifier[self] . identifier[open_conf] ( identifier[conf] )
identifier[self] . identifier[_conf] . identifier[update] ( identifier[conf] )
keyword[if] identifier[isclass] ( identifier[run] ):
identifier[run] = identifier[run] ( identifier[conf] = identifier[self] . identifier[_conf] )
identifier[local_run_conf] ={}
keyword[if] literal[string] keyword[in] identifier[self] . identifier[_conf] :
identifier[logging] . identifier[info] ( literal[string] )
identifier[local_run_conf] . identifier[update] ( identifier[self] . identifier[_conf] [ literal[string] ])
keyword[if] identifier[run] . identifier[__class__] . identifier[__name__] keyword[in] identifier[self] . identifier[_conf] :
identifier[logging] . identifier[info] ( literal[string] %( identifier[run] . identifier[__class__] . identifier[__name__] ,))
identifier[local_run_conf] . identifier[update] ( identifier[self] . identifier[_conf] [ identifier[run] . identifier[__class__] . identifier[__name__] ])
keyword[if] identifier[isinstance] ( identifier[run_conf] , identifier[basestring] ) keyword[and] identifier[os] . identifier[path] . identifier[isfile] ( identifier[run_conf] ):
identifier[logging] . identifier[info] ( literal[string] , identifier[os] . identifier[path] . identifier[abspath] ( identifier[run_conf] ))
keyword[elif] identifier[run_conf] keyword[is] keyword[not] keyword[None] :
identifier[logging] . identifier[info] ( literal[string] )
identifier[run_conf] = identifier[self] . identifier[open_conf] ( identifier[run_conf] )
keyword[if] identifier[run] . identifier[__class__] . identifier[__name__] keyword[in] identifier[run_conf] :
identifier[run_conf] = identifier[run_conf] [ identifier[run] . identifier[__class__] . identifier[__name__] ]
identifier[local_run_conf] . identifier[update] ( identifier[run_conf] )
keyword[if] identifier[use_thread] :
identifier[self] . identifier[current_run] = identifier[run]
@ identifier[thunkify] ( identifier[thread_name] = literal[string] , identifier[daemon] = keyword[True] , identifier[default_func] = identifier[self] . identifier[current_run] . identifier[get_run_status] )
keyword[def] identifier[run_run_in_thread] ():
keyword[return] identifier[run] . identifier[run] ( identifier[run_conf] = identifier[local_run_conf] )
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGINT] , identifier[self] . identifier[_signal_handler] )
identifier[logging] . identifier[info] ( literal[string] )
keyword[return] identifier[run_run_in_thread] ()
keyword[else] :
identifier[self] . identifier[current_run] = identifier[run]
identifier[status] = identifier[run] . identifier[run] ( identifier[run_conf] = identifier[local_run_conf] )
keyword[if] keyword[not] identifier[catch_exception] keyword[and] identifier[status] != identifier[run_status] . identifier[finished] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[return] identifier[status]
|
def run_run(self, run, conf=None, run_conf=None, use_thread=False, catch_exception=True):
"""Runs a run in another thread. Non-blocking.
Parameters
----------
run : class, object
Run class or object.
run_conf : str, dict, file
Specific configuration for the run.
use_thread : bool
If True, run run in thread and returns blocking function.
Returns
-------
If use_thread is True, returns function, which blocks until thread terminates, and which itself returns run status.
If use_thread is False, returns run status.
"""
if isinstance(conf, basestring) and os.path.isfile(conf):
logging.info('Updating configuration from file %s', os.path.abspath(conf)) # depends on [control=['if'], data=[]]
elif conf is not None:
logging.info('Updating configuration') # depends on [control=['if'], data=[]]
conf = self.open_conf(conf)
self._conf.update(conf)
if isclass(run):
# instantiate the class
run = run(conf=self._conf) # depends on [control=['if'], data=[]]
local_run_conf = {}
# general parameters from conf
if 'run_conf' in self._conf:
logging.info('Updating run configuration using run_conf key from configuration')
local_run_conf.update(self._conf['run_conf']) # depends on [control=['if'], data=[]]
# check for class name, scan specific parameters from conf
if run.__class__.__name__ in self._conf:
logging.info('Updating run configuration using %s key from configuration' % (run.__class__.__name__,))
local_run_conf.update(self._conf[run.__class__.__name__]) # depends on [control=['if'], data=[]]
if isinstance(run_conf, basestring) and os.path.isfile(run_conf):
logging.info('Updating run configuration from file %s', os.path.abspath(run_conf)) # depends on [control=['if'], data=[]]
elif run_conf is not None:
logging.info('Updating run configuration') # depends on [control=['if'], data=[]]
run_conf = self.open_conf(run_conf)
# check for class name, scan specific parameters from conf
if run.__class__.__name__ in run_conf:
run_conf = run_conf[run.__class__.__name__] # depends on [control=['if'], data=['run_conf']]
# run_conf parameter has highest priority, updated last
local_run_conf.update(run_conf)
if use_thread:
self.current_run = run
@thunkify(thread_name='RunThread', daemon=True, default_func=self.current_run.get_run_status)
def run_run_in_thread():
return run.run(run_conf=local_run_conf)
signal.signal(signal.SIGINT, self._signal_handler)
logging.info('Press Ctrl-C to stop run')
return run_run_in_thread() # depends on [control=['if'], data=[]]
else:
self.current_run = run
status = run.run(run_conf=local_run_conf)
if not catch_exception and status != run_status.finished:
raise RuntimeError('Exception occurred. Please read the log.') # depends on [control=['if'], data=[]]
return status
|
def within_bounds(
self,
start: timelike,
stop: timelike,
bounds: Union[BaseGeometry, Tuple[float, float, float, float]],
) -> Optional[pd.DataFrame]:
"""EXPERIMENTAL."""
start = to_datetime(start)
stop = to_datetime(stop)
before_hour = round_time(start, "before")
after_hour = round_time(stop, "after")
try:
# thinking of shapely bounds attribute (in this order)
# I just don't want to add the shapely dependency here
west, south, east, north = bounds.bounds # type: ignore
except AttributeError:
west, south, east, north = bounds
other_params = "and lon>={} and lon<={} ".format(west, east)
other_params += "and lat>={} and lat<={} ".format(south, north)
query = self.basic_request.format(
columns="icao24, callsign, s.ITEM as serial, count(*) as count",
other_tables=", state_vectors_data4.serials s",
before_time=start.timestamp(),
after_time=stop.timestamp(),
before_hour=before_hour.timestamp(),
after_hour=after_hour.timestamp(),
other_params=other_params + "group by icao24, callsign, s.ITEM",
)
logging.info(f"Sending request: {query}")
df = self._impala(query, columns="icao24, callsign, serial, count")
if df is None:
return None
df = df[df["count"] != "count"]
df["count"] = df["count"].astype(int)
return df
|
def function[within_bounds, parameter[self, start, stop, bounds]]:
constant[EXPERIMENTAL.]
variable[start] assign[=] call[name[to_datetime], parameter[name[start]]]
variable[stop] assign[=] call[name[to_datetime], parameter[name[stop]]]
variable[before_hour] assign[=] call[name[round_time], parameter[name[start], constant[before]]]
variable[after_hour] assign[=] call[name[round_time], parameter[name[stop], constant[after]]]
<ast.Try object at 0x7da18fe91c00>
variable[other_params] assign[=] call[constant[and lon>={} and lon<={} ].format, parameter[name[west], name[east]]]
<ast.AugAssign object at 0x7da20c6e4580>
variable[query] assign[=] call[name[self].basic_request.format, parameter[]]
call[name[logging].info, parameter[<ast.JoinedStr object at 0x7da20c6e76d0>]]
variable[df] assign[=] call[name[self]._impala, parameter[name[query]]]
if compare[name[df] is constant[None]] begin[:]
return[constant[None]]
variable[df] assign[=] call[name[df]][compare[call[name[df]][constant[count]] not_equal[!=] constant[count]]]
call[name[df]][constant[count]] assign[=] call[call[name[df]][constant[count]].astype, parameter[name[int]]]
return[name[df]]
|
keyword[def] identifier[within_bounds] (
identifier[self] ,
identifier[start] : identifier[timelike] ,
identifier[stop] : identifier[timelike] ,
identifier[bounds] : identifier[Union] [ identifier[BaseGeometry] , identifier[Tuple] [ identifier[float] , identifier[float] , identifier[float] , identifier[float] ]],
)-> identifier[Optional] [ identifier[pd] . identifier[DataFrame] ]:
literal[string]
identifier[start] = identifier[to_datetime] ( identifier[start] )
identifier[stop] = identifier[to_datetime] ( identifier[stop] )
identifier[before_hour] = identifier[round_time] ( identifier[start] , literal[string] )
identifier[after_hour] = identifier[round_time] ( identifier[stop] , literal[string] )
keyword[try] :
identifier[west] , identifier[south] , identifier[east] , identifier[north] = identifier[bounds] . identifier[bounds]
keyword[except] identifier[AttributeError] :
identifier[west] , identifier[south] , identifier[east] , identifier[north] = identifier[bounds]
identifier[other_params] = literal[string] . identifier[format] ( identifier[west] , identifier[east] )
identifier[other_params] += literal[string] . identifier[format] ( identifier[south] , identifier[north] )
identifier[query] = identifier[self] . identifier[basic_request] . identifier[format] (
identifier[columns] = literal[string] ,
identifier[other_tables] = literal[string] ,
identifier[before_time] = identifier[start] . identifier[timestamp] (),
identifier[after_time] = identifier[stop] . identifier[timestamp] (),
identifier[before_hour] = identifier[before_hour] . identifier[timestamp] (),
identifier[after_hour] = identifier[after_hour] . identifier[timestamp] (),
identifier[other_params] = identifier[other_params] + literal[string] ,
)
identifier[logging] . identifier[info] ( literal[string] )
identifier[df] = identifier[self] . identifier[_impala] ( identifier[query] , identifier[columns] = literal[string] )
keyword[if] identifier[df] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[df] = identifier[df] [ identifier[df] [ literal[string] ]!= literal[string] ]
identifier[df] [ literal[string] ]= identifier[df] [ literal[string] ]. identifier[astype] ( identifier[int] )
keyword[return] identifier[df]
|
def within_bounds(self, start: timelike, stop: timelike, bounds: Union[BaseGeometry, Tuple[float, float, float, float]]) -> Optional[pd.DataFrame]:
"""EXPERIMENTAL."""
start = to_datetime(start)
stop = to_datetime(stop)
before_hour = round_time(start, 'before')
after_hour = round_time(stop, 'after')
try:
# thinking of shapely bounds attribute (in this order)
# I just don't want to add the shapely dependency here
(west, south, east, north) = bounds.bounds # type: ignore # depends on [control=['try'], data=[]]
except AttributeError:
(west, south, east, north) = bounds # depends on [control=['except'], data=[]]
other_params = 'and lon>={} and lon<={} '.format(west, east)
other_params += 'and lat>={} and lat<={} '.format(south, north)
query = self.basic_request.format(columns='icao24, callsign, s.ITEM as serial, count(*) as count', other_tables=', state_vectors_data4.serials s', before_time=start.timestamp(), after_time=stop.timestamp(), before_hour=before_hour.timestamp(), after_hour=after_hour.timestamp(), other_params=other_params + 'group by icao24, callsign, s.ITEM')
logging.info(f'Sending request: {query}')
df = self._impala(query, columns='icao24, callsign, serial, count')
if df is None:
return None # depends on [control=['if'], data=[]]
df = df[df['count'] != 'count']
df['count'] = df['count'].astype(int)
return df
|
def mount_configure_send(self, target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw, force_mavlink1=False):
'''
Message to configure a camera mount, directional antenna, etc.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
mount_mode : mount operating mode (see MAV_MOUNT_MODE enum) (uint8_t)
stab_roll : (1 = yes, 0 = no) (uint8_t)
stab_pitch : (1 = yes, 0 = no) (uint8_t)
stab_yaw : (1 = yes, 0 = no) (uint8_t)
'''
return self.send(self.mount_configure_encode(target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw), force_mavlink1=force_mavlink1)
|
def function[mount_configure_send, parameter[self, target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw, force_mavlink1]]:
constant[
Message to configure a camera mount, directional antenna, etc.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
mount_mode : mount operating mode (see MAV_MOUNT_MODE enum) (uint8_t)
stab_roll : (1 = yes, 0 = no) (uint8_t)
stab_pitch : (1 = yes, 0 = no) (uint8_t)
stab_yaw : (1 = yes, 0 = no) (uint8_t)
]
return[call[name[self].send, parameter[call[name[self].mount_configure_encode, parameter[name[target_system], name[target_component], name[mount_mode], name[stab_roll], name[stab_pitch], name[stab_yaw]]]]]]
|
keyword[def] identifier[mount_configure_send] ( identifier[self] , identifier[target_system] , identifier[target_component] , identifier[mount_mode] , identifier[stab_roll] , identifier[stab_pitch] , identifier[stab_yaw] , identifier[force_mavlink1] = keyword[False] ):
literal[string]
keyword[return] identifier[self] . identifier[send] ( identifier[self] . identifier[mount_configure_encode] ( identifier[target_system] , identifier[target_component] , identifier[mount_mode] , identifier[stab_roll] , identifier[stab_pitch] , identifier[stab_yaw] ), identifier[force_mavlink1] = identifier[force_mavlink1] )
|
def mount_configure_send(self, target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw, force_mavlink1=False):
"""
Message to configure a camera mount, directional antenna, etc.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
mount_mode : mount operating mode (see MAV_MOUNT_MODE enum) (uint8_t)
stab_roll : (1 = yes, 0 = no) (uint8_t)
stab_pitch : (1 = yes, 0 = no) (uint8_t)
stab_yaw : (1 = yes, 0 = no) (uint8_t)
"""
return self.send(self.mount_configure_encode(target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw), force_mavlink1=force_mavlink1)
|
def __analizar_evento(self, ret):
"Comprueba y extrae el wvento informativo si existen en la respuesta XML"
evt = ret.get('evento')
if evt:
self.Eventos = [evt]
self.Evento = "%(codigo)s: %(descripcion)s" % evt
|
def function[__analizar_evento, parameter[self, ret]]:
constant[Comprueba y extrae el wvento informativo si existen en la respuesta XML]
variable[evt] assign[=] call[name[ret].get, parameter[constant[evento]]]
if name[evt] begin[:]
name[self].Eventos assign[=] list[[<ast.Name object at 0x7da1b1e00b80>]]
name[self].Evento assign[=] binary_operation[constant[%(codigo)s: %(descripcion)s] <ast.Mod object at 0x7da2590d6920> name[evt]]
|
keyword[def] identifier[__analizar_evento] ( identifier[self] , identifier[ret] ):
literal[string]
identifier[evt] = identifier[ret] . identifier[get] ( literal[string] )
keyword[if] identifier[evt] :
identifier[self] . identifier[Eventos] =[ identifier[evt] ]
identifier[self] . identifier[Evento] = literal[string] % identifier[evt]
|
def __analizar_evento(self, ret):
"""Comprueba y extrae el wvento informativo si existen en la respuesta XML"""
evt = ret.get('evento')
if evt:
self.Eventos = [evt]
self.Evento = '%(codigo)s: %(descripcion)s' % evt # depends on [control=['if'], data=[]]
|
def get_track_scrobbles(self, artist, track, cacheable=False):
"""
Get a list of this user's scrobbles of this artist's track,
including scrobble time.
"""
params = self._get_params()
params["artist"] = artist
params["track"] = track
seq = []
for track in _collect_nodes(
None, self, self.ws_prefix + ".getTrackScrobbles", cacheable, params
):
title = _extract(track, "name")
artist = _extract(track, "artist")
date = _extract(track, "date")
album = _extract(track, "album")
timestamp = track.getElementsByTagName("date")[0].getAttribute("uts")
seq.append(
PlayedTrack(Track(artist, title, self.network), album, date, timestamp)
)
return seq
|
def function[get_track_scrobbles, parameter[self, artist, track, cacheable]]:
constant[
Get a list of this user's scrobbles of this artist's track,
including scrobble time.
]
variable[params] assign[=] call[name[self]._get_params, parameter[]]
call[name[params]][constant[artist]] assign[=] name[artist]
call[name[params]][constant[track]] assign[=] name[track]
variable[seq] assign[=] list[[]]
for taget[name[track]] in starred[call[name[_collect_nodes], parameter[constant[None], name[self], binary_operation[name[self].ws_prefix + constant[.getTrackScrobbles]], name[cacheable], name[params]]]] begin[:]
variable[title] assign[=] call[name[_extract], parameter[name[track], constant[name]]]
variable[artist] assign[=] call[name[_extract], parameter[name[track], constant[artist]]]
variable[date] assign[=] call[name[_extract], parameter[name[track], constant[date]]]
variable[album] assign[=] call[name[_extract], parameter[name[track], constant[album]]]
variable[timestamp] assign[=] call[call[call[name[track].getElementsByTagName, parameter[constant[date]]]][constant[0]].getAttribute, parameter[constant[uts]]]
call[name[seq].append, parameter[call[name[PlayedTrack], parameter[call[name[Track], parameter[name[artist], name[title], name[self].network]], name[album], name[date], name[timestamp]]]]]
return[name[seq]]
|
keyword[def] identifier[get_track_scrobbles] ( identifier[self] , identifier[artist] , identifier[track] , identifier[cacheable] = keyword[False] ):
literal[string]
identifier[params] = identifier[self] . identifier[_get_params] ()
identifier[params] [ literal[string] ]= identifier[artist]
identifier[params] [ literal[string] ]= identifier[track]
identifier[seq] =[]
keyword[for] identifier[track] keyword[in] identifier[_collect_nodes] (
keyword[None] , identifier[self] , identifier[self] . identifier[ws_prefix] + literal[string] , identifier[cacheable] , identifier[params]
):
identifier[title] = identifier[_extract] ( identifier[track] , literal[string] )
identifier[artist] = identifier[_extract] ( identifier[track] , literal[string] )
identifier[date] = identifier[_extract] ( identifier[track] , literal[string] )
identifier[album] = identifier[_extract] ( identifier[track] , literal[string] )
identifier[timestamp] = identifier[track] . identifier[getElementsByTagName] ( literal[string] )[ literal[int] ]. identifier[getAttribute] ( literal[string] )
identifier[seq] . identifier[append] (
identifier[PlayedTrack] ( identifier[Track] ( identifier[artist] , identifier[title] , identifier[self] . identifier[network] ), identifier[album] , identifier[date] , identifier[timestamp] )
)
keyword[return] identifier[seq]
|
def get_track_scrobbles(self, artist, track, cacheable=False):
"""
Get a list of this user's scrobbles of this artist's track,
including scrobble time.
"""
params = self._get_params()
params['artist'] = artist
params['track'] = track
seq = []
for track in _collect_nodes(None, self, self.ws_prefix + '.getTrackScrobbles', cacheable, params):
title = _extract(track, 'name')
artist = _extract(track, 'artist')
date = _extract(track, 'date')
album = _extract(track, 'album')
timestamp = track.getElementsByTagName('date')[0].getAttribute('uts')
seq.append(PlayedTrack(Track(artist, title, self.network), album, date, timestamp)) # depends on [control=['for'], data=['track']]
return seq
|
def setTags(self, tags):
"""Set the tags for current photo to list tags.
(flickr.photos.settags)
"""
method = 'flickr.photos.setTags'
tags = uniq(tags)
_dopost(method, auth=True, photo_id=self.id, tags=tags)
self._load_properties()
|
def function[setTags, parameter[self, tags]]:
constant[Set the tags for current photo to list tags.
(flickr.photos.settags)
]
variable[method] assign[=] constant[flickr.photos.setTags]
variable[tags] assign[=] call[name[uniq], parameter[name[tags]]]
call[name[_dopost], parameter[name[method]]]
call[name[self]._load_properties, parameter[]]
|
keyword[def] identifier[setTags] ( identifier[self] , identifier[tags] ):
literal[string]
identifier[method] = literal[string]
identifier[tags] = identifier[uniq] ( identifier[tags] )
identifier[_dopost] ( identifier[method] , identifier[auth] = keyword[True] , identifier[photo_id] = identifier[self] . identifier[id] , identifier[tags] = identifier[tags] )
identifier[self] . identifier[_load_properties] ()
|
def setTags(self, tags):
"""Set the tags for current photo to list tags.
(flickr.photos.settags)
"""
method = 'flickr.photos.setTags'
tags = uniq(tags)
_dopost(method, auth=True, photo_id=self.id, tags=tags)
self._load_properties()
|
def _has_valid_abs_ref(self, i, construction_table):
"""Checks, if ``i`` uses valid absolute references.
Checks for each index from first to third row of the
``construction_table``, if the references are colinear.
This case has to be specially treated, because the references
are not only atoms (to fix internal degrees of freedom) but also points
in cartesian space called absolute references.
(to fix translational and rotational degrees of freedom)
Args:
i (label): The label has to be in the first three rows.
construction_table (pd.DataFrame):
Returns:
bool:
"""
c_table = construction_table
abs_refs = constants.absolute_refs
A = np.empty((3, 3))
row = c_table.index.get_loc(i)
if row > 2:
message = 'The index {i} is not from the first three, rows'.format
raise ValueError(message(i=i))
for k in range(3):
if k < row:
A[k] = self.loc[c_table.iloc[row, k], ['x', 'y', 'z']]
else:
A[k] = abs_refs[c_table.iloc[row, k]]
v1, v2 = A[2] - A[1], A[1] - A[0]
K = np.cross(v1, v2)
zero = np.full(3, 0.)
return not (np.allclose(K, zero) or np.allclose(v1, zero)
or np.allclose(v2, zero))
|
def function[_has_valid_abs_ref, parameter[self, i, construction_table]]:
constant[Checks, if ``i`` uses valid absolute references.
Checks for each index from first to third row of the
``construction_table``, if the references are colinear.
This case has to be specially treated, because the references
are not only atoms (to fix internal degrees of freedom) but also points
in cartesian space called absolute references.
(to fix translational and rotational degrees of freedom)
Args:
i (label): The label has to be in the first three rows.
construction_table (pd.DataFrame):
Returns:
bool:
]
variable[c_table] assign[=] name[construction_table]
variable[abs_refs] assign[=] name[constants].absolute_refs
variable[A] assign[=] call[name[np].empty, parameter[tuple[[<ast.Constant object at 0x7da1b26f00a0>, <ast.Constant object at 0x7da1b26f3d60>]]]]
variable[row] assign[=] call[name[c_table].index.get_loc, parameter[name[i]]]
if compare[name[row] greater[>] constant[2]] begin[:]
variable[message] assign[=] constant[The index {i} is not from the first three, rows].format
<ast.Raise object at 0x7da1b26f2fe0>
for taget[name[k]] in starred[call[name[range], parameter[constant[3]]]] begin[:]
if compare[name[k] less[<] name[row]] begin[:]
call[name[A]][name[k]] assign[=] call[name[self].loc][tuple[[<ast.Subscript object at 0x7da1b27bb7c0>, <ast.List object at 0x7da1b27b9ff0>]]]
<ast.Tuple object at 0x7da1b27bb760> assign[=] tuple[[<ast.BinOp object at 0x7da1b27bb1f0>, <ast.BinOp object at 0x7da1b27ba4d0>]]
variable[K] assign[=] call[name[np].cross, parameter[name[v1], name[v2]]]
variable[zero] assign[=] call[name[np].full, parameter[constant[3], constant[0.0]]]
return[<ast.UnaryOp object at 0x7da1b27b8ee0>]
|
keyword[def] identifier[_has_valid_abs_ref] ( identifier[self] , identifier[i] , identifier[construction_table] ):
literal[string]
identifier[c_table] = identifier[construction_table]
identifier[abs_refs] = identifier[constants] . identifier[absolute_refs]
identifier[A] = identifier[np] . identifier[empty] (( literal[int] , literal[int] ))
identifier[row] = identifier[c_table] . identifier[index] . identifier[get_loc] ( identifier[i] )
keyword[if] identifier[row] > literal[int] :
identifier[message] = literal[string] . identifier[format]
keyword[raise] identifier[ValueError] ( identifier[message] ( identifier[i] = identifier[i] ))
keyword[for] identifier[k] keyword[in] identifier[range] ( literal[int] ):
keyword[if] identifier[k] < identifier[row] :
identifier[A] [ identifier[k] ]= identifier[self] . identifier[loc] [ identifier[c_table] . identifier[iloc] [ identifier[row] , identifier[k] ],[ literal[string] , literal[string] , literal[string] ]]
keyword[else] :
identifier[A] [ identifier[k] ]= identifier[abs_refs] [ identifier[c_table] . identifier[iloc] [ identifier[row] , identifier[k] ]]
identifier[v1] , identifier[v2] = identifier[A] [ literal[int] ]- identifier[A] [ literal[int] ], identifier[A] [ literal[int] ]- identifier[A] [ literal[int] ]
identifier[K] = identifier[np] . identifier[cross] ( identifier[v1] , identifier[v2] )
identifier[zero] = identifier[np] . identifier[full] ( literal[int] , literal[int] )
keyword[return] keyword[not] ( identifier[np] . identifier[allclose] ( identifier[K] , identifier[zero] ) keyword[or] identifier[np] . identifier[allclose] ( identifier[v1] , identifier[zero] )
keyword[or] identifier[np] . identifier[allclose] ( identifier[v2] , identifier[zero] ))
|
def _has_valid_abs_ref(self, i, construction_table):
"""Checks, if ``i`` uses valid absolute references.
Checks for each index from first to third row of the
``construction_table``, if the references are colinear.
This case has to be specially treated, because the references
are not only atoms (to fix internal degrees of freedom) but also points
in cartesian space called absolute references.
(to fix translational and rotational degrees of freedom)
Args:
i (label): The label has to be in the first three rows.
construction_table (pd.DataFrame):
Returns:
bool:
"""
c_table = construction_table
abs_refs = constants.absolute_refs
A = np.empty((3, 3))
row = c_table.index.get_loc(i)
if row > 2:
message = 'The index {i} is not from the first three, rows'.format
raise ValueError(message(i=i)) # depends on [control=['if'], data=[]]
for k in range(3):
if k < row:
A[k] = self.loc[c_table.iloc[row, k], ['x', 'y', 'z']] # depends on [control=['if'], data=['k', 'row']]
else:
A[k] = abs_refs[c_table.iloc[row, k]] # depends on [control=['for'], data=['k']]
(v1, v2) = (A[2] - A[1], A[1] - A[0])
K = np.cross(v1, v2)
zero = np.full(3, 0.0)
return not (np.allclose(K, zero) or np.allclose(v1, zero) or np.allclose(v2, zero))
|
def _generateChildren(self):
"""Generator which yields all AXChildren of the object."""
try:
children = self.AXChildren
except _a11y.Error:
return
if children:
for child in children:
yield child
|
def function[_generateChildren, parameter[self]]:
constant[Generator which yields all AXChildren of the object.]
<ast.Try object at 0x7da18f810ee0>
if name[children] begin[:]
for taget[name[child]] in starred[name[children]] begin[:]
<ast.Yield object at 0x7da18f09e350>
|
keyword[def] identifier[_generateChildren] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[children] = identifier[self] . identifier[AXChildren]
keyword[except] identifier[_a11y] . identifier[Error] :
keyword[return]
keyword[if] identifier[children] :
keyword[for] identifier[child] keyword[in] identifier[children] :
keyword[yield] identifier[child]
|
def _generateChildren(self):
"""Generator which yields all AXChildren of the object."""
try:
children = self.AXChildren # depends on [control=['try'], data=[]]
except _a11y.Error:
return # depends on [control=['except'], data=[]]
if children:
for child in children:
yield child # depends on [control=['for'], data=['child']] # depends on [control=['if'], data=[]]
|
def _get_activation(self, F, inputs, activation, **kwargs):
"""Get activation function. Convert if is string"""
func = {'tanh': F.tanh,
'relu': F.relu,
'sigmoid': F.sigmoid,
'softsign': F.softsign}.get(activation)
if func:
return func(inputs, **kwargs)
elif isinstance(activation, string_types):
return F.Activation(inputs, act_type=activation, **kwargs)
elif isinstance(activation, LeakyReLU):
return F.LeakyReLU(inputs, act_type='leaky', slope=activation._alpha, **kwargs)
return activation(inputs, **kwargs)
|
def function[_get_activation, parameter[self, F, inputs, activation]]:
constant[Get activation function. Convert if is string]
variable[func] assign[=] call[dictionary[[<ast.Constant object at 0x7da1b20f9750>, <ast.Constant object at 0x7da1b1fe3520>, <ast.Constant object at 0x7da1b1fe33a0>, <ast.Constant object at 0x7da1b1fe2d40>], [<ast.Attribute object at 0x7da1b1fe2170>, <ast.Attribute object at 0x7da1b1fe3dc0>, <ast.Attribute object at 0x7da1b1fe3880>, <ast.Attribute object at 0x7da1b1fe30a0>]].get, parameter[name[activation]]]
if name[func] begin[:]
return[call[name[func], parameter[name[inputs]]]]
return[call[name[activation], parameter[name[inputs]]]]
|
keyword[def] identifier[_get_activation] ( identifier[self] , identifier[F] , identifier[inputs] , identifier[activation] ,** identifier[kwargs] ):
literal[string]
identifier[func] ={ literal[string] : identifier[F] . identifier[tanh] ,
literal[string] : identifier[F] . identifier[relu] ,
literal[string] : identifier[F] . identifier[sigmoid] ,
literal[string] : identifier[F] . identifier[softsign] }. identifier[get] ( identifier[activation] )
keyword[if] identifier[func] :
keyword[return] identifier[func] ( identifier[inputs] ,** identifier[kwargs] )
keyword[elif] identifier[isinstance] ( identifier[activation] , identifier[string_types] ):
keyword[return] identifier[F] . identifier[Activation] ( identifier[inputs] , identifier[act_type] = identifier[activation] ,** identifier[kwargs] )
keyword[elif] identifier[isinstance] ( identifier[activation] , identifier[LeakyReLU] ):
keyword[return] identifier[F] . identifier[LeakyReLU] ( identifier[inputs] , identifier[act_type] = literal[string] , identifier[slope] = identifier[activation] . identifier[_alpha] ,** identifier[kwargs] )
keyword[return] identifier[activation] ( identifier[inputs] ,** identifier[kwargs] )
|
def _get_activation(self, F, inputs, activation, **kwargs):
"""Get activation function. Convert if is string"""
func = {'tanh': F.tanh, 'relu': F.relu, 'sigmoid': F.sigmoid, 'softsign': F.softsign}.get(activation)
if func:
return func(inputs, **kwargs) # depends on [control=['if'], data=[]]
elif isinstance(activation, string_types):
return F.Activation(inputs, act_type=activation, **kwargs) # depends on [control=['if'], data=[]]
elif isinstance(activation, LeakyReLU):
return F.LeakyReLU(inputs, act_type='leaky', slope=activation._alpha, **kwargs) # depends on [control=['if'], data=[]]
return activation(inputs, **kwargs)
|
def define_code_breakpoint(self, dwProcessId, address, condition = True,
action = None):
"""
Creates a disabled code breakpoint at the given address.
@see:
L{has_code_breakpoint},
L{get_code_breakpoint},
L{enable_code_breakpoint},
L{enable_one_shot_code_breakpoint},
L{disable_code_breakpoint},
L{erase_code_breakpoint}
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address of the code instruction to break at.
@type condition: function
@param condition: (Optional) Condition callback function.
The callback signature is::
def condition_callback(event):
return True # returns True or False
Where B{event} is an L{Event} object,
and the return value is a boolean
(C{True} to dispatch the event, C{False} otherwise).
@type action: function
@param action: (Optional) Action callback function.
If specified, the event is handled by this callback instead of
being dispatched normally.
The callback signature is::
def action_callback(event):
pass # no return value
Where B{event} is an L{Event} object,
and the return value is a boolean
(C{True} to dispatch the event, C{False} otherwise).
@rtype: L{CodeBreakpoint}
@return: The code breakpoint object.
"""
process = self.system.get_process(dwProcessId)
bp = CodeBreakpoint(address, condition, action)
key = (dwProcessId, bp.get_address())
if key in self.__codeBP:
msg = "Already exists (PID %d) : %r"
raise KeyError(msg % (dwProcessId, self.__codeBP[key]))
self.__codeBP[key] = bp
return bp
|
def function[define_code_breakpoint, parameter[self, dwProcessId, address, condition, action]]:
constant[
Creates a disabled code breakpoint at the given address.
@see:
L{has_code_breakpoint},
L{get_code_breakpoint},
L{enable_code_breakpoint},
L{enable_one_shot_code_breakpoint},
L{disable_code_breakpoint},
L{erase_code_breakpoint}
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address of the code instruction to break at.
@type condition: function
@param condition: (Optional) Condition callback function.
The callback signature is::
def condition_callback(event):
return True # returns True or False
Where B{event} is an L{Event} object,
and the return value is a boolean
(C{True} to dispatch the event, C{False} otherwise).
@type action: function
@param action: (Optional) Action callback function.
If specified, the event is handled by this callback instead of
being dispatched normally.
The callback signature is::
def action_callback(event):
pass # no return value
Where B{event} is an L{Event} object,
and the return value is a boolean
(C{True} to dispatch the event, C{False} otherwise).
@rtype: L{CodeBreakpoint}
@return: The code breakpoint object.
]
variable[process] assign[=] call[name[self].system.get_process, parameter[name[dwProcessId]]]
variable[bp] assign[=] call[name[CodeBreakpoint], parameter[name[address], name[condition], name[action]]]
variable[key] assign[=] tuple[[<ast.Name object at 0x7da20e956590>, <ast.Call object at 0x7da20e954460>]]
if compare[name[key] in name[self].__codeBP] begin[:]
variable[msg] assign[=] constant[Already exists (PID %d) : %r]
<ast.Raise object at 0x7da20e955e10>
call[name[self].__codeBP][name[key]] assign[=] name[bp]
return[name[bp]]
|
keyword[def] identifier[define_code_breakpoint] ( identifier[self] , identifier[dwProcessId] , identifier[address] , identifier[condition] = keyword[True] ,
identifier[action] = keyword[None] ):
literal[string]
identifier[process] = identifier[self] . identifier[system] . identifier[get_process] ( identifier[dwProcessId] )
identifier[bp] = identifier[CodeBreakpoint] ( identifier[address] , identifier[condition] , identifier[action] )
identifier[key] =( identifier[dwProcessId] , identifier[bp] . identifier[get_address] ())
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[__codeBP] :
identifier[msg] = literal[string]
keyword[raise] identifier[KeyError] ( identifier[msg] %( identifier[dwProcessId] , identifier[self] . identifier[__codeBP] [ identifier[key] ]))
identifier[self] . identifier[__codeBP] [ identifier[key] ]= identifier[bp]
keyword[return] identifier[bp]
|
def define_code_breakpoint(self, dwProcessId, address, condition=True, action=None):
"""
Creates a disabled code breakpoint at the given address.
@see:
L{has_code_breakpoint},
L{get_code_breakpoint},
L{enable_code_breakpoint},
L{enable_one_shot_code_breakpoint},
L{disable_code_breakpoint},
L{erase_code_breakpoint}
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address of the code instruction to break at.
@type condition: function
@param condition: (Optional) Condition callback function.
The callback signature is::
def condition_callback(event):
return True # returns True or False
Where B{event} is an L{Event} object,
and the return value is a boolean
(C{True} to dispatch the event, C{False} otherwise).
@type action: function
@param action: (Optional) Action callback function.
If specified, the event is handled by this callback instead of
being dispatched normally.
The callback signature is::
def action_callback(event):
pass # no return value
Where B{event} is an L{Event} object,
and the return value is a boolean
(C{True} to dispatch the event, C{False} otherwise).
@rtype: L{CodeBreakpoint}
@return: The code breakpoint object.
"""
process = self.system.get_process(dwProcessId)
bp = CodeBreakpoint(address, condition, action)
key = (dwProcessId, bp.get_address())
if key in self.__codeBP:
msg = 'Already exists (PID %d) : %r'
raise KeyError(msg % (dwProcessId, self.__codeBP[key])) # depends on [control=['if'], data=['key']]
self.__codeBP[key] = bp
return bp
|
def find_video_by_url(self, video_url):
"""doc: http://open.youku.com/docs/doc?id=44
"""
url = 'https://openapi.youku.com/v2/videos/show_basic.json'
params = {
'client_id': self.client_id,
'video_url': video_url
}
r = requests.get(url, params=params)
check_error(r)
return r.json()
|
def function[find_video_by_url, parameter[self, video_url]]:
constant[doc: http://open.youku.com/docs/doc?id=44
]
variable[url] assign[=] constant[https://openapi.youku.com/v2/videos/show_basic.json]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b255ceb0>, <ast.Constant object at 0x7da1b255f850>], [<ast.Attribute object at 0x7da1b255fb20>, <ast.Name object at 0x7da1b255f490>]]
variable[r] assign[=] call[name[requests].get, parameter[name[url]]]
call[name[check_error], parameter[name[r]]]
return[call[name[r].json, parameter[]]]
|
keyword[def] identifier[find_video_by_url] ( identifier[self] , identifier[video_url] ):
literal[string]
identifier[url] = literal[string]
identifier[params] ={
literal[string] : identifier[self] . identifier[client_id] ,
literal[string] : identifier[video_url]
}
identifier[r] = identifier[requests] . identifier[get] ( identifier[url] , identifier[params] = identifier[params] )
identifier[check_error] ( identifier[r] )
keyword[return] identifier[r] . identifier[json] ()
|
def find_video_by_url(self, video_url):
"""doc: http://open.youku.com/docs/doc?id=44
"""
url = 'https://openapi.youku.com/v2/videos/show_basic.json'
params = {'client_id': self.client_id, 'video_url': video_url}
r = requests.get(url, params=params)
check_error(r)
return r.json()
|
def expand_node(loader, node, expand_method):
"""
Expands paths on a YAML document node. If it is a sequence node (list) items on the first level are expanded. For
a mapping node (dict), values are expanded.
:param loader: YAML loader.
:type loader: yaml.loader.SafeLoader
:param node: Document node.
:type node: ScalarNode, MappingNode, or SequenceNode
:param expand_method: Callable to expand the path with.
:type expand_method: callable
:return: Expanded value.
:rtype: unicode | str | list | dict
"""
if isinstance(node, yaml.nodes.ScalarNode):
val = loader.construct_scalar(node)
return expand_method(val)
elif isinstance(node, yaml.nodes.MappingNode):
val = loader.construct_mapping(node)
for d_key, d_val in six.iteritems(val):
val[d_key] = expand_method(d_val)
return val
elif isinstance(node, yaml.nodes.SequenceNode):
val = loader.construct_sequence(node)
return [expand_method(l_val) for l_val in val]
|
def function[expand_node, parameter[loader, node, expand_method]]:
constant[
Expands paths on a YAML document node. If it is a sequence node (list) items on the first level are expanded. For
a mapping node (dict), values are expanded.
:param loader: YAML loader.
:type loader: yaml.loader.SafeLoader
:param node: Document node.
:type node: ScalarNode, MappingNode, or SequenceNode
:param expand_method: Callable to expand the path with.
:type expand_method: callable
:return: Expanded value.
:rtype: unicode | str | list | dict
]
if call[name[isinstance], parameter[name[node], name[yaml].nodes.ScalarNode]] begin[:]
variable[val] assign[=] call[name[loader].construct_scalar, parameter[name[node]]]
return[call[name[expand_method], parameter[name[val]]]]
|
keyword[def] identifier[expand_node] ( identifier[loader] , identifier[node] , identifier[expand_method] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[node] , identifier[yaml] . identifier[nodes] . identifier[ScalarNode] ):
identifier[val] = identifier[loader] . identifier[construct_scalar] ( identifier[node] )
keyword[return] identifier[expand_method] ( identifier[val] )
keyword[elif] identifier[isinstance] ( identifier[node] , identifier[yaml] . identifier[nodes] . identifier[MappingNode] ):
identifier[val] = identifier[loader] . identifier[construct_mapping] ( identifier[node] )
keyword[for] identifier[d_key] , identifier[d_val] keyword[in] identifier[six] . identifier[iteritems] ( identifier[val] ):
identifier[val] [ identifier[d_key] ]= identifier[expand_method] ( identifier[d_val] )
keyword[return] identifier[val]
keyword[elif] identifier[isinstance] ( identifier[node] , identifier[yaml] . identifier[nodes] . identifier[SequenceNode] ):
identifier[val] = identifier[loader] . identifier[construct_sequence] ( identifier[node] )
keyword[return] [ identifier[expand_method] ( identifier[l_val] ) keyword[for] identifier[l_val] keyword[in] identifier[val] ]
|
def expand_node(loader, node, expand_method):
"""
Expands paths on a YAML document node. If it is a sequence node (list) items on the first level are expanded. For
a mapping node (dict), values are expanded.
:param loader: YAML loader.
:type loader: yaml.loader.SafeLoader
:param node: Document node.
:type node: ScalarNode, MappingNode, or SequenceNode
:param expand_method: Callable to expand the path with.
:type expand_method: callable
:return: Expanded value.
:rtype: unicode | str | list | dict
"""
if isinstance(node, yaml.nodes.ScalarNode):
val = loader.construct_scalar(node)
return expand_method(val) # depends on [control=['if'], data=[]]
elif isinstance(node, yaml.nodes.MappingNode):
val = loader.construct_mapping(node)
for (d_key, d_val) in six.iteritems(val):
val[d_key] = expand_method(d_val) # depends on [control=['for'], data=[]]
return val # depends on [control=['if'], data=[]]
elif isinstance(node, yaml.nodes.SequenceNode):
val = loader.construct_sequence(node)
return [expand_method(l_val) for l_val in val] # depends on [control=['if'], data=[]]
|
def check_options(options, parser):
"""
check options requirements, print and return exit value
"""
if not options.get('release_environment', None):
print("release environment is required")
parser.print_help()
return os.EX_USAGE
return 0
|
def function[check_options, parameter[options, parser]]:
constant[
check options requirements, print and return exit value
]
if <ast.UnaryOp object at 0x7da18f09f520> begin[:]
call[name[print], parameter[constant[release environment is required]]]
call[name[parser].print_help, parameter[]]
return[name[os].EX_USAGE]
return[constant[0]]
|
keyword[def] identifier[check_options] ( identifier[options] , identifier[parser] ):
literal[string]
keyword[if] keyword[not] identifier[options] . identifier[get] ( literal[string] , keyword[None] ):
identifier[print] ( literal[string] )
identifier[parser] . identifier[print_help] ()
keyword[return] identifier[os] . identifier[EX_USAGE]
keyword[return] literal[int]
|
def check_options(options, parser):
"""
check options requirements, print and return exit value
"""
if not options.get('release_environment', None):
print('release environment is required')
parser.print_help()
return os.EX_USAGE # depends on [control=['if'], data=[]]
return 0
|
def direct_messages_destroy(self, id, include_entities=None):
"""
Destroys the direct message with the given id.
https://dev.twitter.com/docs/api/1.1/post/direct_messages/destroy
:param str id:
(*required*) The ID of the direct message.
:param bool include_entities:
The entities node will not be included when set to ``False``.
:returns:
A direct message dict containing the destroyed direct message.
"""
params = {}
set_str_param(params, 'id', id)
set_bool_param(params, 'include_entities', include_entities)
return self._post_api('direct_messages/destroy.json', params)
|
def function[direct_messages_destroy, parameter[self, id, include_entities]]:
constant[
Destroys the direct message with the given id.
https://dev.twitter.com/docs/api/1.1/post/direct_messages/destroy
:param str id:
(*required*) The ID of the direct message.
:param bool include_entities:
The entities node will not be included when set to ``False``.
:returns:
A direct message dict containing the destroyed direct message.
]
variable[params] assign[=] dictionary[[], []]
call[name[set_str_param], parameter[name[params], constant[id], name[id]]]
call[name[set_bool_param], parameter[name[params], constant[include_entities], name[include_entities]]]
return[call[name[self]._post_api, parameter[constant[direct_messages/destroy.json], name[params]]]]
|
keyword[def] identifier[direct_messages_destroy] ( identifier[self] , identifier[id] , identifier[include_entities] = keyword[None] ):
literal[string]
identifier[params] ={}
identifier[set_str_param] ( identifier[params] , literal[string] , identifier[id] )
identifier[set_bool_param] ( identifier[params] , literal[string] , identifier[include_entities] )
keyword[return] identifier[self] . identifier[_post_api] ( literal[string] , identifier[params] )
|
def direct_messages_destroy(self, id, include_entities=None):
"""
Destroys the direct message with the given id.
https://dev.twitter.com/docs/api/1.1/post/direct_messages/destroy
:param str id:
(*required*) The ID of the direct message.
:param bool include_entities:
The entities node will not be included when set to ``False``.
:returns:
A direct message dict containing the destroyed direct message.
"""
params = {}
set_str_param(params, 'id', id)
set_bool_param(params, 'include_entities', include_entities)
return self._post_api('direct_messages/destroy.json', params)
|
def create_decompress(codec_format):
"""Creates a J2K/JP2 decompress structure.
Wraps the openjp2 library function opj_create_decompress.
Parameters
----------
codec_format : int
Specifies codec to select. Should be one of CODEC_J2K or CODEC_JP2.
Returns
-------
codec : Reference to CODEC_TYPE instance.
"""
OPENJP2.opj_create_decompress.argtypes = [CODEC_FORMAT_TYPE]
OPENJP2.opj_create_decompress.restype = CODEC_TYPE
codec = OPENJP2.opj_create_decompress(codec_format)
return codec
|
def function[create_decompress, parameter[codec_format]]:
constant[Creates a J2K/JP2 decompress structure.
Wraps the openjp2 library function opj_create_decompress.
Parameters
----------
codec_format : int
Specifies codec to select. Should be one of CODEC_J2K or CODEC_JP2.
Returns
-------
codec : Reference to CODEC_TYPE instance.
]
name[OPENJP2].opj_create_decompress.argtypes assign[=] list[[<ast.Name object at 0x7da204621cc0>]]
name[OPENJP2].opj_create_decompress.restype assign[=] name[CODEC_TYPE]
variable[codec] assign[=] call[name[OPENJP2].opj_create_decompress, parameter[name[codec_format]]]
return[name[codec]]
|
keyword[def] identifier[create_decompress] ( identifier[codec_format] ):
literal[string]
identifier[OPENJP2] . identifier[opj_create_decompress] . identifier[argtypes] =[ identifier[CODEC_FORMAT_TYPE] ]
identifier[OPENJP2] . identifier[opj_create_decompress] . identifier[restype] = identifier[CODEC_TYPE]
identifier[codec] = identifier[OPENJP2] . identifier[opj_create_decompress] ( identifier[codec_format] )
keyword[return] identifier[codec]
|
def create_decompress(codec_format):
"""Creates a J2K/JP2 decompress structure.
Wraps the openjp2 library function opj_create_decompress.
Parameters
----------
codec_format : int
Specifies codec to select. Should be one of CODEC_J2K or CODEC_JP2.
Returns
-------
codec : Reference to CODEC_TYPE instance.
"""
OPENJP2.opj_create_decompress.argtypes = [CODEC_FORMAT_TYPE]
OPENJP2.opj_create_decompress.restype = CODEC_TYPE
codec = OPENJP2.opj_create_decompress(codec_format)
return codec
|
def GpsSecondsFromPyUTC( pyUTC, leapSecs=14 ):
"""converts the python epoch to gps seconds
pyEpoch = the python epoch from time.time()
"""
t = t=gpsFromUTC(*ymdhmsFromPyUTC( pyUTC ))
return int(t[0] * 60 * 60 * 24 * 7 + t[1])
|
def function[GpsSecondsFromPyUTC, parameter[pyUTC, leapSecs]]:
constant[converts the python epoch to gps seconds
pyEpoch = the python epoch from time.time()
]
variable[t] assign[=] call[name[gpsFromUTC], parameter[<ast.Starred object at 0x7da18f722e60>]]
return[call[name[int], parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[t]][constant[0]] * constant[60]] * constant[60]] * constant[24]] * constant[7]] + call[name[t]][constant[1]]]]]]
|
keyword[def] identifier[GpsSecondsFromPyUTC] ( identifier[pyUTC] , identifier[leapSecs] = literal[int] ):
literal[string]
identifier[t] = identifier[t] = identifier[gpsFromUTC] (* identifier[ymdhmsFromPyUTC] ( identifier[pyUTC] ))
keyword[return] identifier[int] ( identifier[t] [ literal[int] ]* literal[int] * literal[int] * literal[int] * literal[int] + identifier[t] [ literal[int] ])
|
def GpsSecondsFromPyUTC(pyUTC, leapSecs=14):
"""converts the python epoch to gps seconds
pyEpoch = the python epoch from time.time()
"""
t = t = gpsFromUTC(*ymdhmsFromPyUTC(pyUTC))
return int(t[0] * 60 * 60 * 24 * 7 + t[1])
|
def cookiejar_from_dict(*cookie_dicts):
"""Returns a CookieJar from a key/value dictionary.
:param cookie_dict: Dict of key/values to insert into CookieJar.
"""
cookie_dicts = tuple((d for d in cookie_dicts if d))
if len(cookie_dicts) == 1 and isinstance(cookie_dicts[0], CookieJar):
return cookie_dicts[0]
cookiejar = CookieJar()
for cookie_dict in cookie_dicts:
if isinstance(cookie_dict, CookieJar):
for cookie in cookie_dict:
cookiejar.set_cookie(cookie)
else:
for name in cookie_dict:
cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
return cookiejar
|
def function[cookiejar_from_dict, parameter[]]:
constant[Returns a CookieJar from a key/value dictionary.
:param cookie_dict: Dict of key/values to insert into CookieJar.
]
variable[cookie_dicts] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da18c4cd8d0>]]
if <ast.BoolOp object at 0x7da18bc704c0> begin[:]
return[call[name[cookie_dicts]][constant[0]]]
variable[cookiejar] assign[=] call[name[CookieJar], parameter[]]
for taget[name[cookie_dict]] in starred[name[cookie_dicts]] begin[:]
if call[name[isinstance], parameter[name[cookie_dict], name[CookieJar]]] begin[:]
for taget[name[cookie]] in starred[name[cookie_dict]] begin[:]
call[name[cookiejar].set_cookie, parameter[name[cookie]]]
return[name[cookiejar]]
|
keyword[def] identifier[cookiejar_from_dict] (* identifier[cookie_dicts] ):
literal[string]
identifier[cookie_dicts] = identifier[tuple] (( identifier[d] keyword[for] identifier[d] keyword[in] identifier[cookie_dicts] keyword[if] identifier[d] ))
keyword[if] identifier[len] ( identifier[cookie_dicts] )== literal[int] keyword[and] identifier[isinstance] ( identifier[cookie_dicts] [ literal[int] ], identifier[CookieJar] ):
keyword[return] identifier[cookie_dicts] [ literal[int] ]
identifier[cookiejar] = identifier[CookieJar] ()
keyword[for] identifier[cookie_dict] keyword[in] identifier[cookie_dicts] :
keyword[if] identifier[isinstance] ( identifier[cookie_dict] , identifier[CookieJar] ):
keyword[for] identifier[cookie] keyword[in] identifier[cookie_dict] :
identifier[cookiejar] . identifier[set_cookie] ( identifier[cookie] )
keyword[else] :
keyword[for] identifier[name] keyword[in] identifier[cookie_dict] :
identifier[cookiejar] . identifier[set_cookie] ( identifier[create_cookie] ( identifier[name] , identifier[cookie_dict] [ identifier[name] ]))
keyword[return] identifier[cookiejar]
|
def cookiejar_from_dict(*cookie_dicts):
"""Returns a CookieJar from a key/value dictionary.
:param cookie_dict: Dict of key/values to insert into CookieJar.
"""
cookie_dicts = tuple((d for d in cookie_dicts if d))
if len(cookie_dicts) == 1 and isinstance(cookie_dicts[0], CookieJar):
return cookie_dicts[0] # depends on [control=['if'], data=[]]
cookiejar = CookieJar()
for cookie_dict in cookie_dicts:
if isinstance(cookie_dict, CookieJar):
for cookie in cookie_dict:
cookiejar.set_cookie(cookie) # depends on [control=['for'], data=['cookie']] # depends on [control=['if'], data=[]]
else:
for name in cookie_dict:
cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) # depends on [control=['for'], data=['name']] # depends on [control=['for'], data=['cookie_dict']]
return cookiejar
|
def check_install_json(self):
"""Check all install.json files for valid schema."""
if self.install_json_schema is None:
return
contents = os.listdir(self.app_path)
if self.args.install_json is not None:
contents = [self.args.install_json]
for install_json in sorted(contents):
# skip files that are not install.json files
if 'install.json' not in install_json:
continue
error = None
status = True
try:
# loading explicitly here to keep all error catching in this file
with open(install_json) as fh:
data = json.loads(fh.read())
validate(data, self.install_json_schema)
except SchemaError as e:
status = False
error = e
except ValidationError as e:
status = False
error = e.message
except ValueError:
# any JSON decode error will be caught during syntax validation
return
if error:
# update validation data errors
self.validation_data['errors'].append(
'Schema validation failed for {} ({}).'.format(install_json, error)
)
# update validation data for module
self.validation_data['schema'].append({'filename': install_json, 'status': status})
|
def function[check_install_json, parameter[self]]:
constant[Check all install.json files for valid schema.]
if compare[name[self].install_json_schema is constant[None]] begin[:]
return[None]
variable[contents] assign[=] call[name[os].listdir, parameter[name[self].app_path]]
if compare[name[self].args.install_json is_not constant[None]] begin[:]
variable[contents] assign[=] list[[<ast.Attribute object at 0x7da20c6c4a00>]]
for taget[name[install_json]] in starred[call[name[sorted], parameter[name[contents]]]] begin[:]
if compare[constant[install.json] <ast.NotIn object at 0x7da2590d7190> name[install_json]] begin[:]
continue
variable[error] assign[=] constant[None]
variable[status] assign[=] constant[True]
<ast.Try object at 0x7da20c6c4bb0>
if name[error] begin[:]
call[call[name[self].validation_data][constant[errors]].append, parameter[call[constant[Schema validation failed for {} ({}).].format, parameter[name[install_json], name[error]]]]]
call[call[name[self].validation_data][constant[schema]].append, parameter[dictionary[[<ast.Constant object at 0x7da18f7238b0>, <ast.Constant object at 0x7da18f7239a0>], [<ast.Name object at 0x7da18f720b20>, <ast.Name object at 0x7da18f722fe0>]]]]
|
keyword[def] identifier[check_install_json] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[install_json_schema] keyword[is] keyword[None] :
keyword[return]
identifier[contents] = identifier[os] . identifier[listdir] ( identifier[self] . identifier[app_path] )
keyword[if] identifier[self] . identifier[args] . identifier[install_json] keyword[is] keyword[not] keyword[None] :
identifier[contents] =[ identifier[self] . identifier[args] . identifier[install_json] ]
keyword[for] identifier[install_json] keyword[in] identifier[sorted] ( identifier[contents] ):
keyword[if] literal[string] keyword[not] keyword[in] identifier[install_json] :
keyword[continue]
identifier[error] = keyword[None]
identifier[status] = keyword[True]
keyword[try] :
keyword[with] identifier[open] ( identifier[install_json] ) keyword[as] identifier[fh] :
identifier[data] = identifier[json] . identifier[loads] ( identifier[fh] . identifier[read] ())
identifier[validate] ( identifier[data] , identifier[self] . identifier[install_json_schema] )
keyword[except] identifier[SchemaError] keyword[as] identifier[e] :
identifier[status] = keyword[False]
identifier[error] = identifier[e]
keyword[except] identifier[ValidationError] keyword[as] identifier[e] :
identifier[status] = keyword[False]
identifier[error] = identifier[e] . identifier[message]
keyword[except] identifier[ValueError] :
keyword[return]
keyword[if] identifier[error] :
identifier[self] . identifier[validation_data] [ literal[string] ]. identifier[append] (
literal[string] . identifier[format] ( identifier[install_json] , identifier[error] )
)
identifier[self] . identifier[validation_data] [ literal[string] ]. identifier[append] ({ literal[string] : identifier[install_json] , literal[string] : identifier[status] })
|
def check_install_json(self):
"""Check all install.json files for valid schema."""
if self.install_json_schema is None:
return # depends on [control=['if'], data=[]]
contents = os.listdir(self.app_path)
if self.args.install_json is not None:
contents = [self.args.install_json] # depends on [control=['if'], data=[]]
for install_json in sorted(contents):
# skip files that are not install.json files
if 'install.json' not in install_json:
continue # depends on [control=['if'], data=[]]
error = None
status = True
try:
# loading explicitly here to keep all error catching in this file
with open(install_json) as fh:
data = json.loads(fh.read()) # depends on [control=['with'], data=['fh']]
validate(data, self.install_json_schema) # depends on [control=['try'], data=[]]
except SchemaError as e:
status = False
error = e # depends on [control=['except'], data=['e']]
except ValidationError as e:
status = False
error = e.message # depends on [control=['except'], data=['e']]
except ValueError:
# any JSON decode error will be caught during syntax validation
return # depends on [control=['except'], data=[]]
if error:
# update validation data errors
self.validation_data['errors'].append('Schema validation failed for {} ({}).'.format(install_json, error)) # depends on [control=['if'], data=[]]
# update validation data for module
self.validation_data['schema'].append({'filename': install_json, 'status': status}) # depends on [control=['for'], data=['install_json']]
|
def replay(self, event, ts=0, end_ts=None, with_ts=False):
"""Replay events based on timestamp.
If you split namespace with ts, the replay will only return events
within the same namespace.
:param event: event name
:param ts: replay events after ts, default from 0.
:param end_ts: replay events to ts, default to "+inf".
:param with_ts: return timestamp with events, default to False.
:return: list of pks when with_ts set to False, list of (pk, ts) tuples
when with_ts is True.
"""
key = self._keygen(event, ts)
end_ts = end_ts if end_ts else "+inf"
elements = self.r.zrangebyscore(key, ts, end_ts, withscores=with_ts)
if not with_ts:
return [s(e) for e in elements]
else:
return [(s(e[0]), int(e[1])) for e in elements]
|
def function[replay, parameter[self, event, ts, end_ts, with_ts]]:
constant[Replay events based on timestamp.
If you split namespace with ts, the replay will only return events
within the same namespace.
:param event: event name
:param ts: replay events after ts, default from 0.
:param end_ts: replay events to ts, default to "+inf".
:param with_ts: return timestamp with events, default to False.
:return: list of pks when with_ts set to False, list of (pk, ts) tuples
when with_ts is True.
]
variable[key] assign[=] call[name[self]._keygen, parameter[name[event], name[ts]]]
variable[end_ts] assign[=] <ast.IfExp object at 0x7da18dc04fa0>
variable[elements] assign[=] call[name[self].r.zrangebyscore, parameter[name[key], name[ts], name[end_ts]]]
if <ast.UnaryOp object at 0x7da1b25d1c30> begin[:]
return[<ast.ListComp object at 0x7da1b25d0430>]
|
keyword[def] identifier[replay] ( identifier[self] , identifier[event] , identifier[ts] = literal[int] , identifier[end_ts] = keyword[None] , identifier[with_ts] = keyword[False] ):
literal[string]
identifier[key] = identifier[self] . identifier[_keygen] ( identifier[event] , identifier[ts] )
identifier[end_ts] = identifier[end_ts] keyword[if] identifier[end_ts] keyword[else] literal[string]
identifier[elements] = identifier[self] . identifier[r] . identifier[zrangebyscore] ( identifier[key] , identifier[ts] , identifier[end_ts] , identifier[withscores] = identifier[with_ts] )
keyword[if] keyword[not] identifier[with_ts] :
keyword[return] [ identifier[s] ( identifier[e] ) keyword[for] identifier[e] keyword[in] identifier[elements] ]
keyword[else] :
keyword[return] [( identifier[s] ( identifier[e] [ literal[int] ]), identifier[int] ( identifier[e] [ literal[int] ])) keyword[for] identifier[e] keyword[in] identifier[elements] ]
|
def replay(self, event, ts=0, end_ts=None, with_ts=False):
"""Replay events based on timestamp.
If you split namespace with ts, the replay will only return events
within the same namespace.
:param event: event name
:param ts: replay events after ts, default from 0.
:param end_ts: replay events to ts, default to "+inf".
:param with_ts: return timestamp with events, default to False.
:return: list of pks when with_ts set to False, list of (pk, ts) tuples
when with_ts is True.
"""
key = self._keygen(event, ts)
end_ts = end_ts if end_ts else '+inf'
elements = self.r.zrangebyscore(key, ts, end_ts, withscores=with_ts)
if not with_ts:
return [s(e) for e in elements] # depends on [control=['if'], data=[]]
else:
return [(s(e[0]), int(e[1])) for e in elements]
|
def get_graph_data(self, graph, benchmark):
"""
Iterator over graph data sets
Yields
------
param_idx
Flat index to parameter permutations for parameterized benchmarks.
None if benchmark is not parameterized.
entry_name
Name for the data set. If benchmark is non-parameterized, this is the
benchmark name.
steps
Steps to consider in regression detection.
threshold
User-specified threshold for regression detection.
"""
if benchmark.get('params'):
param_iter = enumerate(zip(itertools.product(*benchmark['params']),
graph.get_steps()))
else:
param_iter = [(None, (None, graph.get_steps()))]
for j, (param, steps) in param_iter:
if param is None:
entry_name = benchmark['name']
else:
entry_name = benchmark['name'] + '({0})'.format(', '.join(param))
start_revision = self._get_start_revision(graph, benchmark, entry_name)
threshold = self._get_threshold(graph, benchmark, entry_name)
if start_revision is None:
# Skip detection
continue
steps = [step for step in steps if step[1] >= start_revision]
yield j, entry_name, steps, threshold
|
def function[get_graph_data, parameter[self, graph, benchmark]]:
constant[
Iterator over graph data sets
Yields
------
param_idx
Flat index to parameter permutations for parameterized benchmarks.
None if benchmark is not parameterized.
entry_name
Name for the data set. If benchmark is non-parameterized, this is the
benchmark name.
steps
Steps to consider in regression detection.
threshold
User-specified threshold for regression detection.
]
if call[name[benchmark].get, parameter[constant[params]]] begin[:]
variable[param_iter] assign[=] call[name[enumerate], parameter[call[name[zip], parameter[call[name[itertools].product, parameter[<ast.Starred object at 0x7da18f58f0a0>]], call[name[graph].get_steps, parameter[]]]]]]
for taget[tuple[[<ast.Name object at 0x7da18f58d030>, <ast.Tuple object at 0x7da18f58dd80>]]] in starred[name[param_iter]] begin[:]
if compare[name[param] is constant[None]] begin[:]
variable[entry_name] assign[=] call[name[benchmark]][constant[name]]
variable[start_revision] assign[=] call[name[self]._get_start_revision, parameter[name[graph], name[benchmark], name[entry_name]]]
variable[threshold] assign[=] call[name[self]._get_threshold, parameter[name[graph], name[benchmark], name[entry_name]]]
if compare[name[start_revision] is constant[None]] begin[:]
continue
variable[steps] assign[=] <ast.ListComp object at 0x7da18f58f160>
<ast.Yield object at 0x7da18f58ceb0>
|
keyword[def] identifier[get_graph_data] ( identifier[self] , identifier[graph] , identifier[benchmark] ):
literal[string]
keyword[if] identifier[benchmark] . identifier[get] ( literal[string] ):
identifier[param_iter] = identifier[enumerate] ( identifier[zip] ( identifier[itertools] . identifier[product] (* identifier[benchmark] [ literal[string] ]),
identifier[graph] . identifier[get_steps] ()))
keyword[else] :
identifier[param_iter] =[( keyword[None] ,( keyword[None] , identifier[graph] . identifier[get_steps] ()))]
keyword[for] identifier[j] ,( identifier[param] , identifier[steps] ) keyword[in] identifier[param_iter] :
keyword[if] identifier[param] keyword[is] keyword[None] :
identifier[entry_name] = identifier[benchmark] [ literal[string] ]
keyword[else] :
identifier[entry_name] = identifier[benchmark] [ literal[string] ]+ literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[param] ))
identifier[start_revision] = identifier[self] . identifier[_get_start_revision] ( identifier[graph] , identifier[benchmark] , identifier[entry_name] )
identifier[threshold] = identifier[self] . identifier[_get_threshold] ( identifier[graph] , identifier[benchmark] , identifier[entry_name] )
keyword[if] identifier[start_revision] keyword[is] keyword[None] :
keyword[continue]
identifier[steps] =[ identifier[step] keyword[for] identifier[step] keyword[in] identifier[steps] keyword[if] identifier[step] [ literal[int] ]>= identifier[start_revision] ]
keyword[yield] identifier[j] , identifier[entry_name] , identifier[steps] , identifier[threshold]
|
def get_graph_data(self, graph, benchmark):
"""
Iterator over graph data sets
Yields
------
param_idx
Flat index to parameter permutations for parameterized benchmarks.
None if benchmark is not parameterized.
entry_name
Name for the data set. If benchmark is non-parameterized, this is the
benchmark name.
steps
Steps to consider in regression detection.
threshold
User-specified threshold for regression detection.
"""
if benchmark.get('params'):
param_iter = enumerate(zip(itertools.product(*benchmark['params']), graph.get_steps())) # depends on [control=['if'], data=[]]
else:
param_iter = [(None, (None, graph.get_steps()))]
for (j, (param, steps)) in param_iter:
if param is None:
entry_name = benchmark['name'] # depends on [control=['if'], data=[]]
else:
entry_name = benchmark['name'] + '({0})'.format(', '.join(param))
start_revision = self._get_start_revision(graph, benchmark, entry_name)
threshold = self._get_threshold(graph, benchmark, entry_name)
if start_revision is None:
# Skip detection
continue # depends on [control=['if'], data=[]]
steps = [step for step in steps if step[1] >= start_revision]
yield (j, entry_name, steps, threshold) # depends on [control=['for'], data=[]]
|
def _mark_received(self, tsn):
"""
Mark an incoming data TSN as received.
"""
# it's a duplicate
if uint32_gte(self._last_received_tsn, tsn) or tsn in self._sack_misordered:
self._sack_duplicates.append(tsn)
return True
# consolidate misordered entries
self._sack_misordered.add(tsn)
for tsn in sorted(self._sack_misordered):
if tsn == tsn_plus_one(self._last_received_tsn):
self._last_received_tsn = tsn
else:
break
# filter out obsolete entries
def is_obsolete(x):
return uint32_gt(x, self._last_received_tsn)
self._sack_duplicates = list(filter(is_obsolete, self._sack_duplicates))
self._sack_misordered = set(filter(is_obsolete, self._sack_misordered))
|
def function[_mark_received, parameter[self, tsn]]:
constant[
Mark an incoming data TSN as received.
]
if <ast.BoolOp object at 0x7da204963be0> begin[:]
call[name[self]._sack_duplicates.append, parameter[name[tsn]]]
return[constant[True]]
call[name[self]._sack_misordered.add, parameter[name[tsn]]]
for taget[name[tsn]] in starred[call[name[sorted], parameter[name[self]._sack_misordered]]] begin[:]
if compare[name[tsn] equal[==] call[name[tsn_plus_one], parameter[name[self]._last_received_tsn]]] begin[:]
name[self]._last_received_tsn assign[=] name[tsn]
def function[is_obsolete, parameter[x]]:
return[call[name[uint32_gt], parameter[name[x], name[self]._last_received_tsn]]]
name[self]._sack_duplicates assign[=] call[name[list], parameter[call[name[filter], parameter[name[is_obsolete], name[self]._sack_duplicates]]]]
name[self]._sack_misordered assign[=] call[name[set], parameter[call[name[filter], parameter[name[is_obsolete], name[self]._sack_misordered]]]]
|
keyword[def] identifier[_mark_received] ( identifier[self] , identifier[tsn] ):
literal[string]
keyword[if] identifier[uint32_gte] ( identifier[self] . identifier[_last_received_tsn] , identifier[tsn] ) keyword[or] identifier[tsn] keyword[in] identifier[self] . identifier[_sack_misordered] :
identifier[self] . identifier[_sack_duplicates] . identifier[append] ( identifier[tsn] )
keyword[return] keyword[True]
identifier[self] . identifier[_sack_misordered] . identifier[add] ( identifier[tsn] )
keyword[for] identifier[tsn] keyword[in] identifier[sorted] ( identifier[self] . identifier[_sack_misordered] ):
keyword[if] identifier[tsn] == identifier[tsn_plus_one] ( identifier[self] . identifier[_last_received_tsn] ):
identifier[self] . identifier[_last_received_tsn] = identifier[tsn]
keyword[else] :
keyword[break]
keyword[def] identifier[is_obsolete] ( identifier[x] ):
keyword[return] identifier[uint32_gt] ( identifier[x] , identifier[self] . identifier[_last_received_tsn] )
identifier[self] . identifier[_sack_duplicates] = identifier[list] ( identifier[filter] ( identifier[is_obsolete] , identifier[self] . identifier[_sack_duplicates] ))
identifier[self] . identifier[_sack_misordered] = identifier[set] ( identifier[filter] ( identifier[is_obsolete] , identifier[self] . identifier[_sack_misordered] ))
|
def _mark_received(self, tsn):
"""
Mark an incoming data TSN as received.
"""
# it's a duplicate
if uint32_gte(self._last_received_tsn, tsn) or tsn in self._sack_misordered:
self._sack_duplicates.append(tsn)
return True # depends on [control=['if'], data=[]]
# consolidate misordered entries
self._sack_misordered.add(tsn)
for tsn in sorted(self._sack_misordered):
if tsn == tsn_plus_one(self._last_received_tsn):
self._last_received_tsn = tsn # depends on [control=['if'], data=['tsn']]
else:
break # depends on [control=['for'], data=['tsn']]
# filter out obsolete entries
def is_obsolete(x):
return uint32_gt(x, self._last_received_tsn)
self._sack_duplicates = list(filter(is_obsolete, self._sack_duplicates))
self._sack_misordered = set(filter(is_obsolete, self._sack_misordered))
|
def _moments_central(data, center=None, order=1):
"""
Calculate the central image moments up to the specified order.
Parameters
----------
data : 2D array-like
The input 2D array.
center : tuple of two floats or `None`, optional
The ``(x, y)`` center position. If `None` it will calculated as
the "center of mass" of the input ``data``.
order : int, optional
The maximum order of the moments to calculate.
Returns
-------
moments : 2D `~numpy.ndarray`
The central image moments.
"""
data = np.asarray(data).astype(float)
if data.ndim != 2:
raise ValueError('data must be a 2D array.')
if center is None:
from ..centroids import centroid_com
center = centroid_com(data)
indices = np.ogrid[[slice(0, i) for i in data.shape]]
ypowers = (indices[0] - center[1]) ** np.arange(order + 1)
xpowers = np.transpose(indices[1] - center[0]) ** np.arange(order + 1)
return np.dot(np.dot(np.transpose(ypowers), data), xpowers)
|
def function[_moments_central, parameter[data, center, order]]:
constant[
Calculate the central image moments up to the specified order.
Parameters
----------
data : 2D array-like
The input 2D array.
center : tuple of two floats or `None`, optional
The ``(x, y)`` center position. If `None` it will calculated as
the "center of mass" of the input ``data``.
order : int, optional
The maximum order of the moments to calculate.
Returns
-------
moments : 2D `~numpy.ndarray`
The central image moments.
]
variable[data] assign[=] call[call[name[np].asarray, parameter[name[data]]].astype, parameter[name[float]]]
if compare[name[data].ndim not_equal[!=] constant[2]] begin[:]
<ast.Raise object at 0x7da18f58de40>
if compare[name[center] is constant[None]] begin[:]
from relative_module[centroids] import module[centroid_com]
variable[center] assign[=] call[name[centroid_com], parameter[name[data]]]
variable[indices] assign[=] call[name[np].ogrid][<ast.ListComp object at 0x7da18f58cd60>]
variable[ypowers] assign[=] binary_operation[binary_operation[call[name[indices]][constant[0]] - call[name[center]][constant[1]]] ** call[name[np].arange, parameter[binary_operation[name[order] + constant[1]]]]]
variable[xpowers] assign[=] binary_operation[call[name[np].transpose, parameter[binary_operation[call[name[indices]][constant[1]] - call[name[center]][constant[0]]]]] ** call[name[np].arange, parameter[binary_operation[name[order] + constant[1]]]]]
return[call[name[np].dot, parameter[call[name[np].dot, parameter[call[name[np].transpose, parameter[name[ypowers]]], name[data]]], name[xpowers]]]]
|
keyword[def] identifier[_moments_central] ( identifier[data] , identifier[center] = keyword[None] , identifier[order] = literal[int] ):
literal[string]
identifier[data] = identifier[np] . identifier[asarray] ( identifier[data] ). identifier[astype] ( identifier[float] )
keyword[if] identifier[data] . identifier[ndim] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[center] keyword[is] keyword[None] :
keyword[from] .. identifier[centroids] keyword[import] identifier[centroid_com]
identifier[center] = identifier[centroid_com] ( identifier[data] )
identifier[indices] = identifier[np] . identifier[ogrid] [[ identifier[slice] ( literal[int] , identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[data] . identifier[shape] ]]
identifier[ypowers] =( identifier[indices] [ literal[int] ]- identifier[center] [ literal[int] ])** identifier[np] . identifier[arange] ( identifier[order] + literal[int] )
identifier[xpowers] = identifier[np] . identifier[transpose] ( identifier[indices] [ literal[int] ]- identifier[center] [ literal[int] ])** identifier[np] . identifier[arange] ( identifier[order] + literal[int] )
keyword[return] identifier[np] . identifier[dot] ( identifier[np] . identifier[dot] ( identifier[np] . identifier[transpose] ( identifier[ypowers] ), identifier[data] ), identifier[xpowers] )
|
def _moments_central(data, center=None, order=1):
"""
Calculate the central image moments up to the specified order.
Parameters
----------
data : 2D array-like
The input 2D array.
center : tuple of two floats or `None`, optional
The ``(x, y)`` center position. If `None` it will calculated as
the "center of mass" of the input ``data``.
order : int, optional
The maximum order of the moments to calculate.
Returns
-------
moments : 2D `~numpy.ndarray`
The central image moments.
"""
data = np.asarray(data).astype(float)
if data.ndim != 2:
raise ValueError('data must be a 2D array.') # depends on [control=['if'], data=[]]
if center is None:
from ..centroids import centroid_com
center = centroid_com(data) # depends on [control=['if'], data=['center']]
indices = np.ogrid[[slice(0, i) for i in data.shape]]
ypowers = (indices[0] - center[1]) ** np.arange(order + 1)
xpowers = np.transpose(indices[1] - center[0]) ** np.arange(order + 1)
return np.dot(np.dot(np.transpose(ypowers), data), xpowers)
|
def display_message(
self, subject='Find My iPhone Alert', message="This is a note",
sounds=False
):
""" Send a request to the device to play a sound.
It's possible to pass a custom message by changing the `subject`.
"""
data = json.dumps(
{
'device': self.content['id'],
'subject': subject,
'sound': sounds,
'userText': True,
'text': message
}
)
self.session.post(
self.message_url,
params=self.params,
data=data
)
|
def function[display_message, parameter[self, subject, message, sounds]]:
constant[ Send a request to the device to play a sound.
It's possible to pass a custom message by changing the `subject`.
]
variable[data] assign[=] call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da1b1726fe0>, <ast.Constant object at 0x7da1b1726710>, <ast.Constant object at 0x7da1b1726f80>, <ast.Constant object at 0x7da1b1726da0>, <ast.Constant object at 0x7da1b17264a0>], [<ast.Subscript object at 0x7da1b17240d0>, <ast.Name object at 0x7da1b17279d0>, <ast.Name object at 0x7da1b17272e0>, <ast.Constant object at 0x7da1b17256f0>, <ast.Name object at 0x7da1b1726410>]]]]
call[name[self].session.post, parameter[name[self].message_url]]
|
keyword[def] identifier[display_message] (
identifier[self] , identifier[subject] = literal[string] , identifier[message] = literal[string] ,
identifier[sounds] = keyword[False]
):
literal[string]
identifier[data] = identifier[json] . identifier[dumps] (
{
literal[string] : identifier[self] . identifier[content] [ literal[string] ],
literal[string] : identifier[subject] ,
literal[string] : identifier[sounds] ,
literal[string] : keyword[True] ,
literal[string] : identifier[message]
}
)
identifier[self] . identifier[session] . identifier[post] (
identifier[self] . identifier[message_url] ,
identifier[params] = identifier[self] . identifier[params] ,
identifier[data] = identifier[data]
)
|
def display_message(self, subject='Find My iPhone Alert', message='This is a note', sounds=False):
""" Send a request to the device to play a sound.
It's possible to pass a custom message by changing the `subject`.
"""
data = json.dumps({'device': self.content['id'], 'subject': subject, 'sound': sounds, 'userText': True, 'text': message})
self.session.post(self.message_url, params=self.params, data=data)
|
def read(self, size=0):
"""Read a chunk of bytes from queue.
size = 0: Read next chunk (arbitrary length)
> 0: Read one chunk of `size` bytes (or less if stream was closed)
< 0: Read all bytes as single chunk (i.e. blocks until stream is closed)
This method blocks until the requested size become available.
However, if close() was called, '' is returned immediately.
"""
res = self.unread
self.unread = ""
# Get next chunk, cumulating requested size as needed
while res == "" or size < 0 or (size > 0 and len(res) < size):
try:
# Read pending data, blocking if neccessary
# (but handle the case that close() is called while waiting)
res += compat.to_native(self.queue.get(True, 0.1))
except compat.queue.Empty:
# There was no pending data: wait for more, unless close() was called
if self.is_closed:
break
# Deliver `size` bytes from buffer
if size > 0 and len(res) > size:
self.unread = res[size:]
res = res[:size]
# print("FileLikeQueue.read({}) => {} bytes".format(size, len(res)))
return res
|
def function[read, parameter[self, size]]:
constant[Read a chunk of bytes from queue.
size = 0: Read next chunk (arbitrary length)
> 0: Read one chunk of `size` bytes (or less if stream was closed)
< 0: Read all bytes as single chunk (i.e. blocks until stream is closed)
This method blocks until the requested size become available.
However, if close() was called, '' is returned immediately.
]
variable[res] assign[=] name[self].unread
name[self].unread assign[=] constant[]
while <ast.BoolOp object at 0x7da1b0191d80> begin[:]
<ast.Try object at 0x7da1b0190f40>
if <ast.BoolOp object at 0x7da1b01902b0> begin[:]
name[self].unread assign[=] call[name[res]][<ast.Slice object at 0x7da1b0191450>]
variable[res] assign[=] call[name[res]][<ast.Slice object at 0x7da1b01918a0>]
return[name[res]]
|
keyword[def] identifier[read] ( identifier[self] , identifier[size] = literal[int] ):
literal[string]
identifier[res] = identifier[self] . identifier[unread]
identifier[self] . identifier[unread] = literal[string]
keyword[while] identifier[res] == literal[string] keyword[or] identifier[size] < literal[int] keyword[or] ( identifier[size] > literal[int] keyword[and] identifier[len] ( identifier[res] )< identifier[size] ):
keyword[try] :
identifier[res] += identifier[compat] . identifier[to_native] ( identifier[self] . identifier[queue] . identifier[get] ( keyword[True] , literal[int] ))
keyword[except] identifier[compat] . identifier[queue] . identifier[Empty] :
keyword[if] identifier[self] . identifier[is_closed] :
keyword[break]
keyword[if] identifier[size] > literal[int] keyword[and] identifier[len] ( identifier[res] )> identifier[size] :
identifier[self] . identifier[unread] = identifier[res] [ identifier[size] :]
identifier[res] = identifier[res] [: identifier[size] ]
keyword[return] identifier[res]
|
def read(self, size=0):
"""Read a chunk of bytes from queue.
size = 0: Read next chunk (arbitrary length)
> 0: Read one chunk of `size` bytes (or less if stream was closed)
< 0: Read all bytes as single chunk (i.e. blocks until stream is closed)
This method blocks until the requested size become available.
However, if close() was called, '' is returned immediately.
"""
res = self.unread
self.unread = ''
# Get next chunk, cumulating requested size as needed
while res == '' or size < 0 or (size > 0 and len(res) < size):
try:
# Read pending data, blocking if neccessary
# (but handle the case that close() is called while waiting)
res += compat.to_native(self.queue.get(True, 0.1)) # depends on [control=['try'], data=[]]
except compat.queue.Empty:
# There was no pending data: wait for more, unless close() was called
if self.is_closed:
break # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
# Deliver `size` bytes from buffer
if size > 0 and len(res) > size:
self.unread = res[size:]
res = res[:size] # depends on [control=['if'], data=[]]
# print("FileLikeQueue.read({}) => {} bytes".format(size, len(res)))
return res
|
def present(name,
block_icmp=None,
prune_block_icmp=False,
default=None,
masquerade=False,
ports=None,
prune_ports=False,
port_fwd=None,
prune_port_fwd=False,
services=None,
prune_services=False,
interfaces=None,
prune_interfaces=False,
sources=None,
prune_sources=False,
rich_rules=None,
prune_rich_rules=False):
'''
Ensure a zone has specific attributes.
name
The zone to modify.
default : None
Set this zone as the default zone if ``True``.
masquerade : False
Enable or disable masquerade for a zone.
block_icmp : None
List of ICMP types to block in the zone.
prune_block_icmp : False
If ``True``, remove all but the specified block_icmp from the zone.
ports : None
List of ports to add to the zone.
prune_ports : False
If ``True``, remove all but the specified ports from the zone.
port_fwd : None
List of port forwards to add to the zone.
prune_port_fwd : False
If ``True``, remove all but the specified port_fwd from the zone.
services : None
List of services to add to the zone.
prune_services : False
If ``True``, remove all but the specified services from the zone.
.. note:: Currently defaults to True for compatibility, but will be changed to False in a future release.
interfaces : None
List of interfaces to add to the zone.
prune_interfaces : False
If ``True``, remove all but the specified interfaces from the zone.
sources : None
List of sources to add to the zone.
prune_sources : False
If ``True``, remove all but the specified sources from the zone.
rich_rules : None
List of rich rules to add to the zone.
prune_rich_rules : False
If ``True``, remove all but the specified rich rules from the zone.
'''
ret = _present(name, block_icmp, prune_block_icmp, default, masquerade, ports, prune_ports,
port_fwd, prune_port_fwd, services, prune_services, interfaces, prune_interfaces,
sources, prune_sources, rich_rules, prune_rich_rules)
# Reload firewalld service on changes
if ret['changes'] != {}:
__salt__['firewalld.reload_rules']()
return ret
|
def function[present, parameter[name, block_icmp, prune_block_icmp, default, masquerade, ports, prune_ports, port_fwd, prune_port_fwd, services, prune_services, interfaces, prune_interfaces, sources, prune_sources, rich_rules, prune_rich_rules]]:
constant[
Ensure a zone has specific attributes.
name
The zone to modify.
default : None
Set this zone as the default zone if ``True``.
masquerade : False
Enable or disable masquerade for a zone.
block_icmp : None
List of ICMP types to block in the zone.
prune_block_icmp : False
If ``True``, remove all but the specified block_icmp from the zone.
ports : None
List of ports to add to the zone.
prune_ports : False
If ``True``, remove all but the specified ports from the zone.
port_fwd : None
List of port forwards to add to the zone.
prune_port_fwd : False
If ``True``, remove all but the specified port_fwd from the zone.
services : None
List of services to add to the zone.
prune_services : False
If ``True``, remove all but the specified services from the zone.
.. note:: Currently defaults to True for compatibility, but will be changed to False in a future release.
interfaces : None
List of interfaces to add to the zone.
prune_interfaces : False
If ``True``, remove all but the specified interfaces from the zone.
sources : None
List of sources to add to the zone.
prune_sources : False
If ``True``, remove all but the specified sources from the zone.
rich_rules : None
List of rich rules to add to the zone.
prune_rich_rules : False
If ``True``, remove all but the specified rich rules from the zone.
]
variable[ret] assign[=] call[name[_present], parameter[name[name], name[block_icmp], name[prune_block_icmp], name[default], name[masquerade], name[ports], name[prune_ports], name[port_fwd], name[prune_port_fwd], name[services], name[prune_services], name[interfaces], name[prune_interfaces], name[sources], name[prune_sources], name[rich_rules], name[prune_rich_rules]]]
if compare[call[name[ret]][constant[changes]] not_equal[!=] dictionary[[], []]] begin[:]
call[call[name[__salt__]][constant[firewalld.reload_rules]], parameter[]]
return[name[ret]]
|
keyword[def] identifier[present] ( identifier[name] ,
identifier[block_icmp] = keyword[None] ,
identifier[prune_block_icmp] = keyword[False] ,
identifier[default] = keyword[None] ,
identifier[masquerade] = keyword[False] ,
identifier[ports] = keyword[None] ,
identifier[prune_ports] = keyword[False] ,
identifier[port_fwd] = keyword[None] ,
identifier[prune_port_fwd] = keyword[False] ,
identifier[services] = keyword[None] ,
identifier[prune_services] = keyword[False] ,
identifier[interfaces] = keyword[None] ,
identifier[prune_interfaces] = keyword[False] ,
identifier[sources] = keyword[None] ,
identifier[prune_sources] = keyword[False] ,
identifier[rich_rules] = keyword[None] ,
identifier[prune_rich_rules] = keyword[False] ):
literal[string]
identifier[ret] = identifier[_present] ( identifier[name] , identifier[block_icmp] , identifier[prune_block_icmp] , identifier[default] , identifier[masquerade] , identifier[ports] , identifier[prune_ports] ,
identifier[port_fwd] , identifier[prune_port_fwd] , identifier[services] , identifier[prune_services] , identifier[interfaces] , identifier[prune_interfaces] ,
identifier[sources] , identifier[prune_sources] , identifier[rich_rules] , identifier[prune_rich_rules] )
keyword[if] identifier[ret] [ literal[string] ]!={}:
identifier[__salt__] [ literal[string] ]()
keyword[return] identifier[ret]
|
def present(name, block_icmp=None, prune_block_icmp=False, default=None, masquerade=False, ports=None, prune_ports=False, port_fwd=None, prune_port_fwd=False, services=None, prune_services=False, interfaces=None, prune_interfaces=False, sources=None, prune_sources=False, rich_rules=None, prune_rich_rules=False):
"""
Ensure a zone has specific attributes.
name
The zone to modify.
default : None
Set this zone as the default zone if ``True``.
masquerade : False
Enable or disable masquerade for a zone.
block_icmp : None
List of ICMP types to block in the zone.
prune_block_icmp : False
If ``True``, remove all but the specified block_icmp from the zone.
ports : None
List of ports to add to the zone.
prune_ports : False
If ``True``, remove all but the specified ports from the zone.
port_fwd : None
List of port forwards to add to the zone.
prune_port_fwd : False
If ``True``, remove all but the specified port_fwd from the zone.
services : None
List of services to add to the zone.
prune_services : False
If ``True``, remove all but the specified services from the zone.
.. note:: Currently defaults to True for compatibility, but will be changed to False in a future release.
interfaces : None
List of interfaces to add to the zone.
prune_interfaces : False
If ``True``, remove all but the specified interfaces from the zone.
sources : None
List of sources to add to the zone.
prune_sources : False
If ``True``, remove all but the specified sources from the zone.
rich_rules : None
List of rich rules to add to the zone.
prune_rich_rules : False
If ``True``, remove all but the specified rich rules from the zone.
"""
ret = _present(name, block_icmp, prune_block_icmp, default, masquerade, ports, prune_ports, port_fwd, prune_port_fwd, services, prune_services, interfaces, prune_interfaces, sources, prune_sources, rich_rules, prune_rich_rules)
# Reload firewalld service on changes
if ret['changes'] != {}:
__salt__['firewalld.reload_rules']() # depends on [control=['if'], data=[]]
return ret
|
def add_io_hook(self, hook):
"""
Args:
hook: This hook will be invoked for every incoming and outgoing CAN frame.
Hook arguments: (direction, frame)
See FRAME_DIRECTION_*, CANFrame.
"""
def proxy(*args):
hook(*args)
self._io_hooks.append(proxy)
return self.HookRemover(lambda: self._io_hooks.remove(proxy))
|
def function[add_io_hook, parameter[self, hook]]:
constant[
Args:
hook: This hook will be invoked for every incoming and outgoing CAN frame.
Hook arguments: (direction, frame)
See FRAME_DIRECTION_*, CANFrame.
]
def function[proxy, parameter[]]:
call[name[hook], parameter[<ast.Starred object at 0x7da2054a6c50>]]
call[name[self]._io_hooks.append, parameter[name[proxy]]]
return[call[name[self].HookRemover, parameter[<ast.Lambda object at 0x7da2054a7be0>]]]
|
keyword[def] identifier[add_io_hook] ( identifier[self] , identifier[hook] ):
literal[string]
keyword[def] identifier[proxy] (* identifier[args] ):
identifier[hook] (* identifier[args] )
identifier[self] . identifier[_io_hooks] . identifier[append] ( identifier[proxy] )
keyword[return] identifier[self] . identifier[HookRemover] ( keyword[lambda] : identifier[self] . identifier[_io_hooks] . identifier[remove] ( identifier[proxy] ))
|
def add_io_hook(self, hook):
"""
Args:
hook: This hook will be invoked for every incoming and outgoing CAN frame.
Hook arguments: (direction, frame)
See FRAME_DIRECTION_*, CANFrame.
"""
def proxy(*args):
hook(*args)
self._io_hooks.append(proxy)
return self.HookRemover(lambda : self._io_hooks.remove(proxy))
|
def check_homepage(package_info, *args):
"""
Does the package have a homepage listed?
:param package_info: package_info dictionary
:return: Tuple (is the condition True or False?, reason if it is False else None)
"""
reason = "Home page missing"
result = False
if package_info.get('home_page') not in BAD_VALUES:
result = True
return result, reason, HAS_HOMEPAGE
|
def function[check_homepage, parameter[package_info]]:
constant[
Does the package have a homepage listed?
:param package_info: package_info dictionary
:return: Tuple (is the condition True or False?, reason if it is False else None)
]
variable[reason] assign[=] constant[Home page missing]
variable[result] assign[=] constant[False]
if compare[call[name[package_info].get, parameter[constant[home_page]]] <ast.NotIn object at 0x7da2590d7190> name[BAD_VALUES]] begin[:]
variable[result] assign[=] constant[True]
return[tuple[[<ast.Name object at 0x7da18bcca9e0>, <ast.Name object at 0x7da18bccb2e0>, <ast.Name object at 0x7da18bccb970>]]]
|
keyword[def] identifier[check_homepage] ( identifier[package_info] ,* identifier[args] ):
literal[string]
identifier[reason] = literal[string]
identifier[result] = keyword[False]
keyword[if] identifier[package_info] . identifier[get] ( literal[string] ) keyword[not] keyword[in] identifier[BAD_VALUES] :
identifier[result] = keyword[True]
keyword[return] identifier[result] , identifier[reason] , identifier[HAS_HOMEPAGE]
|
def check_homepage(package_info, *args):
"""
Does the package have a homepage listed?
:param package_info: package_info dictionary
:return: Tuple (is the condition True or False?, reason if it is False else None)
"""
reason = 'Home page missing'
result = False
if package_info.get('home_page') not in BAD_VALUES:
result = True # depends on [control=['if'], data=[]]
return (result, reason, HAS_HOMEPAGE)
|
def remove_request_init_listener(self, fn, *args, **kwargs):
"""
Removes a callback and arguments from the list.
See :meth:`.Session.add_request_init_listener`.
"""
self._request_init_callbacks.remove((fn, args, kwargs))
|
def function[remove_request_init_listener, parameter[self, fn]]:
constant[
Removes a callback and arguments from the list.
See :meth:`.Session.add_request_init_listener`.
]
call[name[self]._request_init_callbacks.remove, parameter[tuple[[<ast.Name object at 0x7da2046227a0>, <ast.Name object at 0x7da2046205b0>, <ast.Name object at 0x7da204621270>]]]]
|
keyword[def] identifier[remove_request_init_listener] ( identifier[self] , identifier[fn] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[_request_init_callbacks] . identifier[remove] (( identifier[fn] , identifier[args] , identifier[kwargs] ))
|
def remove_request_init_listener(self, fn, *args, **kwargs):
"""
Removes a callback and arguments from the list.
See :meth:`.Session.add_request_init_listener`.
"""
self._request_init_callbacks.remove((fn, args, kwargs))
|
def task_collection_thread_handler(self, results_queue):
"""Main method for worker to run
Pops a chunk of tasks off the collection of pending tasks to be added and submits them to be added.
:param collections.deque results_queue: Queue for worker to output results to
"""
# Add tasks until either we run out or we run into an unexpected error
while self.tasks_to_add and not self.errors:
max_tasks = self._max_tasks_per_request # local copy
chunk_tasks_to_add = []
with self._pending_queue_lock:
while len(chunk_tasks_to_add) < max_tasks and self.tasks_to_add:
chunk_tasks_to_add.append(self.tasks_to_add.pop())
if chunk_tasks_to_add:
self._bulk_add_tasks(results_queue, chunk_tasks_to_add)
|
def function[task_collection_thread_handler, parameter[self, results_queue]]:
constant[Main method for worker to run
Pops a chunk of tasks off the collection of pending tasks to be added and submits them to be added.
:param collections.deque results_queue: Queue for worker to output results to
]
while <ast.BoolOp object at 0x7da1b03733d0> begin[:]
variable[max_tasks] assign[=] name[self]._max_tasks_per_request
variable[chunk_tasks_to_add] assign[=] list[[]]
with name[self]._pending_queue_lock begin[:]
while <ast.BoolOp object at 0x7da1b03725f0> begin[:]
call[name[chunk_tasks_to_add].append, parameter[call[name[self].tasks_to_add.pop, parameter[]]]]
if name[chunk_tasks_to_add] begin[:]
call[name[self]._bulk_add_tasks, parameter[name[results_queue], name[chunk_tasks_to_add]]]
|
keyword[def] identifier[task_collection_thread_handler] ( identifier[self] , identifier[results_queue] ):
literal[string]
keyword[while] identifier[self] . identifier[tasks_to_add] keyword[and] keyword[not] identifier[self] . identifier[errors] :
identifier[max_tasks] = identifier[self] . identifier[_max_tasks_per_request]
identifier[chunk_tasks_to_add] =[]
keyword[with] identifier[self] . identifier[_pending_queue_lock] :
keyword[while] identifier[len] ( identifier[chunk_tasks_to_add] )< identifier[max_tasks] keyword[and] identifier[self] . identifier[tasks_to_add] :
identifier[chunk_tasks_to_add] . identifier[append] ( identifier[self] . identifier[tasks_to_add] . identifier[pop] ())
keyword[if] identifier[chunk_tasks_to_add] :
identifier[self] . identifier[_bulk_add_tasks] ( identifier[results_queue] , identifier[chunk_tasks_to_add] )
|
def task_collection_thread_handler(self, results_queue):
"""Main method for worker to run
Pops a chunk of tasks off the collection of pending tasks to be added and submits them to be added.
:param collections.deque results_queue: Queue for worker to output results to
"""
# Add tasks until either we run out or we run into an unexpected error
while self.tasks_to_add and (not self.errors):
max_tasks = self._max_tasks_per_request # local copy
chunk_tasks_to_add = []
with self._pending_queue_lock:
while len(chunk_tasks_to_add) < max_tasks and self.tasks_to_add:
chunk_tasks_to_add.append(self.tasks_to_add.pop()) # depends on [control=['while'], data=[]] # depends on [control=['with'], data=[]]
if chunk_tasks_to_add:
self._bulk_add_tasks(results_queue, chunk_tasks_to_add) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
|
def _merge_map(key, values, partial):
"""A map function used in merge phase.
Stores (key, values) into KeyValues proto and yields its serialization.
Args:
key: values key.
values: values themselves.
partial: True if more values for this key will follow. False otherwise.
Yields:
The proto.
"""
proto = kv_pb.KeyValues()
proto.set_key(key)
proto.value_list().extend(values)
yield proto.Encode()
|
def function[_merge_map, parameter[key, values, partial]]:
constant[A map function used in merge phase.
Stores (key, values) into KeyValues proto and yields its serialization.
Args:
key: values key.
values: values themselves.
partial: True if more values for this key will follow. False otherwise.
Yields:
The proto.
]
variable[proto] assign[=] call[name[kv_pb].KeyValues, parameter[]]
call[name[proto].set_key, parameter[name[key]]]
call[call[name[proto].value_list, parameter[]].extend, parameter[name[values]]]
<ast.Yield object at 0x7da20c6c59c0>
|
keyword[def] identifier[_merge_map] ( identifier[key] , identifier[values] , identifier[partial] ):
literal[string]
identifier[proto] = identifier[kv_pb] . identifier[KeyValues] ()
identifier[proto] . identifier[set_key] ( identifier[key] )
identifier[proto] . identifier[value_list] (). identifier[extend] ( identifier[values] )
keyword[yield] identifier[proto] . identifier[Encode] ()
|
def _merge_map(key, values, partial):
"""A map function used in merge phase.
Stores (key, values) into KeyValues proto and yields its serialization.
Args:
key: values key.
values: values themselves.
partial: True if more values for this key will follow. False otherwise.
Yields:
The proto.
"""
proto = kv_pb.KeyValues()
proto.set_key(key)
proto.value_list().extend(values)
yield proto.Encode()
|
def get_ad_url(self, ad_id, sandbox):
"""
get_ad_url:
gets ad server thing
"""
if sandbox:
return self.sandbox_ad_server + '/view/' + str(ad_id)
else:
return self.ad_server + '/view/' + str(ad_id)
|
def function[get_ad_url, parameter[self, ad_id, sandbox]]:
constant[
get_ad_url:
gets ad server thing
]
if name[sandbox] begin[:]
return[binary_operation[binary_operation[name[self].sandbox_ad_server + constant[/view/]] + call[name[str], parameter[name[ad_id]]]]]
|
keyword[def] identifier[get_ad_url] ( identifier[self] , identifier[ad_id] , identifier[sandbox] ):
literal[string]
keyword[if] identifier[sandbox] :
keyword[return] identifier[self] . identifier[sandbox_ad_server] + literal[string] + identifier[str] ( identifier[ad_id] )
keyword[else] :
keyword[return] identifier[self] . identifier[ad_server] + literal[string] + identifier[str] ( identifier[ad_id] )
|
def get_ad_url(self, ad_id, sandbox):
"""
get_ad_url:
gets ad server thing
"""
if sandbox:
return self.sandbox_ad_server + '/view/' + str(ad_id) # depends on [control=['if'], data=[]]
else:
return self.ad_server + '/view/' + str(ad_id)
|
def get_scanner_params_xml(self):
""" Returns the OSP Daemon's scanner params in xml format. """
scanner_params = Element('scanner_params')
for param_id, param in self.scanner_params.items():
param_xml = SubElement(scanner_params, 'scanner_param')
for name, value in [('id', param_id),
('type', param['type'])]:
param_xml.set(name, value)
for name, value in [('name', param['name']),
('description', param['description']),
('default', param['default']),
('mandatory', param['mandatory'])]:
elem = SubElement(param_xml, name)
elem.text = str(value)
return scanner_params
|
def function[get_scanner_params_xml, parameter[self]]:
constant[ Returns the OSP Daemon's scanner params in xml format. ]
variable[scanner_params] assign[=] call[name[Element], parameter[constant[scanner_params]]]
for taget[tuple[[<ast.Name object at 0x7da20e957010>, <ast.Name object at 0x7da20e9556c0>]]] in starred[call[name[self].scanner_params.items, parameter[]]] begin[:]
variable[param_xml] assign[=] call[name[SubElement], parameter[name[scanner_params], constant[scanner_param]]]
for taget[tuple[[<ast.Name object at 0x7da20e961d80>, <ast.Name object at 0x7da20e961630>]]] in starred[list[[<ast.Tuple object at 0x7da212db4cd0>, <ast.Tuple object at 0x7da212db5030>]]] begin[:]
call[name[param_xml].set, parameter[name[name], name[value]]]
for taget[tuple[[<ast.Name object at 0x7da20e74ad70>, <ast.Name object at 0x7da20e74ae30>]]] in starred[list[[<ast.Tuple object at 0x7da18f721d80>, <ast.Tuple object at 0x7da18f7239d0>, <ast.Tuple object at 0x7da18f7230d0>, <ast.Tuple object at 0x7da18f722170>]]] begin[:]
variable[elem] assign[=] call[name[SubElement], parameter[name[param_xml], name[name]]]
name[elem].text assign[=] call[name[str], parameter[name[value]]]
return[name[scanner_params]]
|
keyword[def] identifier[get_scanner_params_xml] ( identifier[self] ):
literal[string]
identifier[scanner_params] = identifier[Element] ( literal[string] )
keyword[for] identifier[param_id] , identifier[param] keyword[in] identifier[self] . identifier[scanner_params] . identifier[items] ():
identifier[param_xml] = identifier[SubElement] ( identifier[scanner_params] , literal[string] )
keyword[for] identifier[name] , identifier[value] keyword[in] [( literal[string] , identifier[param_id] ),
( literal[string] , identifier[param] [ literal[string] ])]:
identifier[param_xml] . identifier[set] ( identifier[name] , identifier[value] )
keyword[for] identifier[name] , identifier[value] keyword[in] [( literal[string] , identifier[param] [ literal[string] ]),
( literal[string] , identifier[param] [ literal[string] ]),
( literal[string] , identifier[param] [ literal[string] ]),
( literal[string] , identifier[param] [ literal[string] ])]:
identifier[elem] = identifier[SubElement] ( identifier[param_xml] , identifier[name] )
identifier[elem] . identifier[text] = identifier[str] ( identifier[value] )
keyword[return] identifier[scanner_params]
|
def get_scanner_params_xml(self):
""" Returns the OSP Daemon's scanner params in xml format. """
scanner_params = Element('scanner_params')
for (param_id, param) in self.scanner_params.items():
param_xml = SubElement(scanner_params, 'scanner_param')
for (name, value) in [('id', param_id), ('type', param['type'])]:
param_xml.set(name, value) # depends on [control=['for'], data=[]]
for (name, value) in [('name', param['name']), ('description', param['description']), ('default', param['default']), ('mandatory', param['mandatory'])]:
elem = SubElement(param_xml, name)
elem.text = str(value) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
return scanner_params
|
def parse_address(self, address_line):
"""
Parses the given address into it's individual address fields.
"""
params = {"term": address_line}
json = self._make_request('/address/getParsedAddress', params)
if json is None:
return None
return Address.from_json(json)
|
def function[parse_address, parameter[self, address_line]]:
constant[
Parses the given address into it's individual address fields.
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b0b71ea0>], [<ast.Name object at 0x7da1b0b71870>]]
variable[json] assign[=] call[name[self]._make_request, parameter[constant[/address/getParsedAddress], name[params]]]
if compare[name[json] is constant[None]] begin[:]
return[constant[None]]
return[call[name[Address].from_json, parameter[name[json]]]]
|
keyword[def] identifier[parse_address] ( identifier[self] , identifier[address_line] ):
literal[string]
identifier[params] ={ literal[string] : identifier[address_line] }
identifier[json] = identifier[self] . identifier[_make_request] ( literal[string] , identifier[params] )
keyword[if] identifier[json] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[Address] . identifier[from_json] ( identifier[json] )
|
def parse_address(self, address_line):
"""
Parses the given address into it's individual address fields.
"""
params = {'term': address_line}
json = self._make_request('/address/getParsedAddress', params)
if json is None:
return None # depends on [control=['if'], data=[]]
return Address.from_json(json)
|
def importData(directory):
"""Parse the input files and return two dictionnaries"""
dataTask = OrderedDict()
dataQueue = OrderedDict()
for fichier in sorted(os.listdir(directory)):
try:
with open("{directory}/{fichier}".format(**locals()), 'rb') as f:
fileName, fileType = fichier.rsplit('-', 1)
if fileType == "QUEUE":
dataQueue[fileName] = pickle.load(f)
else:
dataTask[fileName] = pickle.load(f)
except:
# Can be a directory
pass
return dataTask, dataQueue
|
def function[importData, parameter[directory]]:
constant[Parse the input files and return two dictionnaries]
variable[dataTask] assign[=] call[name[OrderedDict], parameter[]]
variable[dataQueue] assign[=] call[name[OrderedDict], parameter[]]
for taget[name[fichier]] in starred[call[name[sorted], parameter[call[name[os].listdir, parameter[name[directory]]]]]] begin[:]
<ast.Try object at 0x7da18eb577f0>
return[tuple[[<ast.Name object at 0x7da18eb56bc0>, <ast.Name object at 0x7da18eb54040>]]]
|
keyword[def] identifier[importData] ( identifier[directory] ):
literal[string]
identifier[dataTask] = identifier[OrderedDict] ()
identifier[dataQueue] = identifier[OrderedDict] ()
keyword[for] identifier[fichier] keyword[in] identifier[sorted] ( identifier[os] . identifier[listdir] ( identifier[directory] )):
keyword[try] :
keyword[with] identifier[open] ( literal[string] . identifier[format] (** identifier[locals] ()), literal[string] ) keyword[as] identifier[f] :
identifier[fileName] , identifier[fileType] = identifier[fichier] . identifier[rsplit] ( literal[string] , literal[int] )
keyword[if] identifier[fileType] == literal[string] :
identifier[dataQueue] [ identifier[fileName] ]= identifier[pickle] . identifier[load] ( identifier[f] )
keyword[else] :
identifier[dataTask] [ identifier[fileName] ]= identifier[pickle] . identifier[load] ( identifier[f] )
keyword[except] :
keyword[pass]
keyword[return] identifier[dataTask] , identifier[dataQueue]
|
def importData(directory):
"""Parse the input files and return two dictionnaries"""
dataTask = OrderedDict()
dataQueue = OrderedDict()
for fichier in sorted(os.listdir(directory)):
try:
with open('{directory}/{fichier}'.format(**locals()), 'rb') as f:
(fileName, fileType) = fichier.rsplit('-', 1)
if fileType == 'QUEUE':
dataQueue[fileName] = pickle.load(f) # depends on [control=['if'], data=[]]
else:
dataTask[fileName] = pickle.load(f) # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
except:
# Can be a directory
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['fichier']]
return (dataTask, dataQueue)
|
def assert_equals(df1, df2, ignore_order=set(), ignore_indices=set(), all_close=False, _return_reason=False):
'''
Assert 2 data frames are equal
A more verbose form of ``assert equals(df1, df2, ...)``. See `equals` for an explanation of the parameters.
Parameters
----------
df1 : ~pandas.DataFrame
Actual data frame.
df2 : ~pandas.DataFrame
Expected data frame.
ignore_order : ~typing.Set[int]
ignore_indices : ~typing.Set[int]
all_close : bool
'''
equals_, reason = equals(df1, df2, ignore_order, ignore_indices, all_close, _return_reason=True)
assert equals_, '{}\n\n{}\n\n{}'.format(reason, df1.to_string(), df2.to_string())
|
def function[assert_equals, parameter[df1, df2, ignore_order, ignore_indices, all_close, _return_reason]]:
constant[
Assert 2 data frames are equal
A more verbose form of ``assert equals(df1, df2, ...)``. See `equals` for an explanation of the parameters.
Parameters
----------
df1 : ~pandas.DataFrame
Actual data frame.
df2 : ~pandas.DataFrame
Expected data frame.
ignore_order : ~typing.Set[int]
ignore_indices : ~typing.Set[int]
all_close : bool
]
<ast.Tuple object at 0x7da2041d8b80> assign[=] call[name[equals], parameter[name[df1], name[df2], name[ignore_order], name[ignore_indices], name[all_close]]]
assert[name[equals_]]
|
keyword[def] identifier[assert_equals] ( identifier[df1] , identifier[df2] , identifier[ignore_order] = identifier[set] (), identifier[ignore_indices] = identifier[set] (), identifier[all_close] = keyword[False] , identifier[_return_reason] = keyword[False] ):
literal[string]
identifier[equals_] , identifier[reason] = identifier[equals] ( identifier[df1] , identifier[df2] , identifier[ignore_order] , identifier[ignore_indices] , identifier[all_close] , identifier[_return_reason] = keyword[True] )
keyword[assert] identifier[equals_] , literal[string] . identifier[format] ( identifier[reason] , identifier[df1] . identifier[to_string] (), identifier[df2] . identifier[to_string] ())
|
def assert_equals(df1, df2, ignore_order=set(), ignore_indices=set(), all_close=False, _return_reason=False):
"""
Assert 2 data frames are equal
A more verbose form of ``assert equals(df1, df2, ...)``. See `equals` for an explanation of the parameters.
Parameters
----------
df1 : ~pandas.DataFrame
Actual data frame.
df2 : ~pandas.DataFrame
Expected data frame.
ignore_order : ~typing.Set[int]
ignore_indices : ~typing.Set[int]
all_close : bool
"""
(equals_, reason) = equals(df1, df2, ignore_order, ignore_indices, all_close, _return_reason=True)
assert equals_, '{}\n\n{}\n\n{}'.format(reason, df1.to_string(), df2.to_string())
|
def fit_2dgaussian(data, error=None, mask=None):
"""
Fit a 2D Gaussian plus a constant to a 2D image.
Invalid values (e.g. NaNs or infs) in the ``data`` or ``error``
arrays are automatically masked. The mask for invalid values
represents the combination of the invalid-value masks for the
``data`` and ``error`` arrays.
Parameters
----------
data : array_like
The 2D array of the image.
error : array_like, optional
The 2D array of the 1-sigma errors of the input ``data``.
mask : array_like (bool), optional
A boolean mask, with the same shape as ``data``, where a `True`
value indicates the corresponding element of ``data`` is masked.
Returns
-------
result : A `GaussianConst2D` model instance.
The best-fitting Gaussian 2D model.
"""
from ..morphology import data_properties # prevent circular imports
data = np.ma.asanyarray(data)
if mask is not None and mask is not np.ma.nomask:
mask = np.asanyarray(mask)
if data.shape != mask.shape:
raise ValueError('data and mask must have the same shape.')
data.mask |= mask
if np.any(~np.isfinite(data)):
data = np.ma.masked_invalid(data)
warnings.warn('Input data contains input values (e.g. NaNs or infs), '
'which were automatically masked.', AstropyUserWarning)
if error is not None:
error = np.ma.masked_invalid(error)
if data.shape != error.shape:
raise ValueError('data and error must have the same shape.')
data.mask |= error.mask
weights = 1.0 / error.clip(min=1.e-30)
else:
weights = np.ones(data.shape)
if np.ma.count(data) < 7:
raise ValueError('Input data must have a least 7 unmasked values to '
'fit a 2D Gaussian plus a constant.')
# assign zero weight to masked pixels
if data.mask is not np.ma.nomask:
weights[data.mask] = 0.
mask = data.mask
data.fill_value = 0.0
data = data.filled()
# Subtract the minimum of the data as a crude background estimate.
# This will also make the data values positive, preventing issues with
# the moment estimation in data_properties (moments from negative data
# values can yield undefined Gaussian parameters, e.g. x/y_stddev).
props = data_properties(data - np.min(data), mask=mask)
init_const = 0. # subtracted data minimum above
init_amplitude = np.ptp(data)
g_init = GaussianConst2D(constant=init_const, amplitude=init_amplitude,
x_mean=props.xcentroid.value,
y_mean=props.ycentroid.value,
x_stddev=props.semimajor_axis_sigma.value,
y_stddev=props.semiminor_axis_sigma.value,
theta=props.orientation.value)
fitter = LevMarLSQFitter()
y, x = np.indices(data.shape)
gfit = fitter(g_init, x, y, data, weights=weights)
return gfit
|
def function[fit_2dgaussian, parameter[data, error, mask]]:
constant[
Fit a 2D Gaussian plus a constant to a 2D image.
Invalid values (e.g. NaNs or infs) in the ``data`` or ``error``
arrays are automatically masked. The mask for invalid values
represents the combination of the invalid-value masks for the
``data`` and ``error`` arrays.
Parameters
----------
data : array_like
The 2D array of the image.
error : array_like, optional
The 2D array of the 1-sigma errors of the input ``data``.
mask : array_like (bool), optional
A boolean mask, with the same shape as ``data``, where a `True`
value indicates the corresponding element of ``data`` is masked.
Returns
-------
result : A `GaussianConst2D` model instance.
The best-fitting Gaussian 2D model.
]
from relative_module[morphology] import module[data_properties]
variable[data] assign[=] call[name[np].ma.asanyarray, parameter[name[data]]]
if <ast.BoolOp object at 0x7da1b1149ae0> begin[:]
variable[mask] assign[=] call[name[np].asanyarray, parameter[name[mask]]]
if compare[name[data].shape not_equal[!=] name[mask].shape] begin[:]
<ast.Raise object at 0x7da1b114bfa0>
<ast.AugAssign object at 0x7da1b114bcd0>
if call[name[np].any, parameter[<ast.UnaryOp object at 0x7da1b1149c00>]] begin[:]
variable[data] assign[=] call[name[np].ma.masked_invalid, parameter[name[data]]]
call[name[warnings].warn, parameter[constant[Input data contains input values (e.g. NaNs or infs), which were automatically masked.], name[AstropyUserWarning]]]
if compare[name[error] is_not constant[None]] begin[:]
variable[error] assign[=] call[name[np].ma.masked_invalid, parameter[name[error]]]
if compare[name[data].shape not_equal[!=] name[error].shape] begin[:]
<ast.Raise object at 0x7da1b114a470>
<ast.AugAssign object at 0x7da1b1148b20>
variable[weights] assign[=] binary_operation[constant[1.0] / call[name[error].clip, parameter[]]]
if compare[call[name[np].ma.count, parameter[name[data]]] less[<] constant[7]] begin[:]
<ast.Raise object at 0x7da1b114ad10>
if compare[name[data].mask is_not name[np].ma.nomask] begin[:]
call[name[weights]][name[data].mask] assign[=] constant[0.0]
variable[mask] assign[=] name[data].mask
name[data].fill_value assign[=] constant[0.0]
variable[data] assign[=] call[name[data].filled, parameter[]]
variable[props] assign[=] call[name[data_properties], parameter[binary_operation[name[data] - call[name[np].min, parameter[name[data]]]]]]
variable[init_const] assign[=] constant[0.0]
variable[init_amplitude] assign[=] call[name[np].ptp, parameter[name[data]]]
variable[g_init] assign[=] call[name[GaussianConst2D], parameter[]]
variable[fitter] assign[=] call[name[LevMarLSQFitter], parameter[]]
<ast.Tuple object at 0x7da1b1149d20> assign[=] call[name[np].indices, parameter[name[data].shape]]
variable[gfit] assign[=] call[name[fitter], parameter[name[g_init], name[x], name[y], name[data]]]
return[name[gfit]]
|
keyword[def] identifier[fit_2dgaussian] ( identifier[data] , identifier[error] = keyword[None] , identifier[mask] = keyword[None] ):
literal[string]
keyword[from] .. identifier[morphology] keyword[import] identifier[data_properties]
identifier[data] = identifier[np] . identifier[ma] . identifier[asanyarray] ( identifier[data] )
keyword[if] identifier[mask] keyword[is] keyword[not] keyword[None] keyword[and] identifier[mask] keyword[is] keyword[not] identifier[np] . identifier[ma] . identifier[nomask] :
identifier[mask] = identifier[np] . identifier[asanyarray] ( identifier[mask] )
keyword[if] identifier[data] . identifier[shape] != identifier[mask] . identifier[shape] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[data] . identifier[mask] |= identifier[mask]
keyword[if] identifier[np] . identifier[any] (~ identifier[np] . identifier[isfinite] ( identifier[data] )):
identifier[data] = identifier[np] . identifier[ma] . identifier[masked_invalid] ( identifier[data] )
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] , identifier[AstropyUserWarning] )
keyword[if] identifier[error] keyword[is] keyword[not] keyword[None] :
identifier[error] = identifier[np] . identifier[ma] . identifier[masked_invalid] ( identifier[error] )
keyword[if] identifier[data] . identifier[shape] != identifier[error] . identifier[shape] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[data] . identifier[mask] |= identifier[error] . identifier[mask]
identifier[weights] = literal[int] / identifier[error] . identifier[clip] ( identifier[min] = literal[int] )
keyword[else] :
identifier[weights] = identifier[np] . identifier[ones] ( identifier[data] . identifier[shape] )
keyword[if] identifier[np] . identifier[ma] . identifier[count] ( identifier[data] )< literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[if] identifier[data] . identifier[mask] keyword[is] keyword[not] identifier[np] . identifier[ma] . identifier[nomask] :
identifier[weights] [ identifier[data] . identifier[mask] ]= literal[int]
identifier[mask] = identifier[data] . identifier[mask]
identifier[data] . identifier[fill_value] = literal[int]
identifier[data] = identifier[data] . identifier[filled] ()
identifier[props] = identifier[data_properties] ( identifier[data] - identifier[np] . identifier[min] ( identifier[data] ), identifier[mask] = identifier[mask] )
identifier[init_const] = literal[int]
identifier[init_amplitude] = identifier[np] . identifier[ptp] ( identifier[data] )
identifier[g_init] = identifier[GaussianConst2D] ( identifier[constant] = identifier[init_const] , identifier[amplitude] = identifier[init_amplitude] ,
identifier[x_mean] = identifier[props] . identifier[xcentroid] . identifier[value] ,
identifier[y_mean] = identifier[props] . identifier[ycentroid] . identifier[value] ,
identifier[x_stddev] = identifier[props] . identifier[semimajor_axis_sigma] . identifier[value] ,
identifier[y_stddev] = identifier[props] . identifier[semiminor_axis_sigma] . identifier[value] ,
identifier[theta] = identifier[props] . identifier[orientation] . identifier[value] )
identifier[fitter] = identifier[LevMarLSQFitter] ()
identifier[y] , identifier[x] = identifier[np] . identifier[indices] ( identifier[data] . identifier[shape] )
identifier[gfit] = identifier[fitter] ( identifier[g_init] , identifier[x] , identifier[y] , identifier[data] , identifier[weights] = identifier[weights] )
keyword[return] identifier[gfit]
|
def fit_2dgaussian(data, error=None, mask=None):
"""
Fit a 2D Gaussian plus a constant to a 2D image.
Invalid values (e.g. NaNs or infs) in the ``data`` or ``error``
arrays are automatically masked. The mask for invalid values
represents the combination of the invalid-value masks for the
``data`` and ``error`` arrays.
Parameters
----------
data : array_like
The 2D array of the image.
error : array_like, optional
The 2D array of the 1-sigma errors of the input ``data``.
mask : array_like (bool), optional
A boolean mask, with the same shape as ``data``, where a `True`
value indicates the corresponding element of ``data`` is masked.
Returns
-------
result : A `GaussianConst2D` model instance.
The best-fitting Gaussian 2D model.
"""
from ..morphology import data_properties # prevent circular imports
data = np.ma.asanyarray(data)
if mask is not None and mask is not np.ma.nomask:
mask = np.asanyarray(mask)
if data.shape != mask.shape:
raise ValueError('data and mask must have the same shape.') # depends on [control=['if'], data=[]]
data.mask |= mask # depends on [control=['if'], data=[]]
if np.any(~np.isfinite(data)):
data = np.ma.masked_invalid(data)
warnings.warn('Input data contains input values (e.g. NaNs or infs), which were automatically masked.', AstropyUserWarning) # depends on [control=['if'], data=[]]
if error is not None:
error = np.ma.masked_invalid(error)
if data.shape != error.shape:
raise ValueError('data and error must have the same shape.') # depends on [control=['if'], data=[]]
data.mask |= error.mask
weights = 1.0 / error.clip(min=1e-30) # depends on [control=['if'], data=['error']]
else:
weights = np.ones(data.shape)
if np.ma.count(data) < 7:
raise ValueError('Input data must have a least 7 unmasked values to fit a 2D Gaussian plus a constant.') # depends on [control=['if'], data=[]]
# assign zero weight to masked pixels
if data.mask is not np.ma.nomask:
weights[data.mask] = 0.0 # depends on [control=['if'], data=[]]
mask = data.mask
data.fill_value = 0.0
data = data.filled()
# Subtract the minimum of the data as a crude background estimate.
# This will also make the data values positive, preventing issues with
# the moment estimation in data_properties (moments from negative data
# values can yield undefined Gaussian parameters, e.g. x/y_stddev).
props = data_properties(data - np.min(data), mask=mask)
init_const = 0.0 # subtracted data minimum above
init_amplitude = np.ptp(data)
g_init = GaussianConst2D(constant=init_const, amplitude=init_amplitude, x_mean=props.xcentroid.value, y_mean=props.ycentroid.value, x_stddev=props.semimajor_axis_sigma.value, y_stddev=props.semiminor_axis_sigma.value, theta=props.orientation.value)
fitter = LevMarLSQFitter()
(y, x) = np.indices(data.shape)
gfit = fitter(g_init, x, y, data, weights=weights)
return gfit
|
def update_vpnservice(vpnservice, desc, profile=None):
'''
Updates a VPN service
CLI Example:
.. code-block:: bash
salt '*' neutron.update_vpnservice vpnservice-name desc='VPN Service1'
:param vpnservice: ID or name of vpn service to update
:param desc: Set a description for the VPN service
:param profile: Profile to build on (Optional)
:return: Value of updated VPN service information
'''
conn = _auth(profile)
return conn.update_vpnservice(vpnservice, desc)
|
def function[update_vpnservice, parameter[vpnservice, desc, profile]]:
constant[
Updates a VPN service
CLI Example:
.. code-block:: bash
salt '*' neutron.update_vpnservice vpnservice-name desc='VPN Service1'
:param vpnservice: ID or name of vpn service to update
:param desc: Set a description for the VPN service
:param profile: Profile to build on (Optional)
:return: Value of updated VPN service information
]
variable[conn] assign[=] call[name[_auth], parameter[name[profile]]]
return[call[name[conn].update_vpnservice, parameter[name[vpnservice], name[desc]]]]
|
keyword[def] identifier[update_vpnservice] ( identifier[vpnservice] , identifier[desc] , identifier[profile] = keyword[None] ):
literal[string]
identifier[conn] = identifier[_auth] ( identifier[profile] )
keyword[return] identifier[conn] . identifier[update_vpnservice] ( identifier[vpnservice] , identifier[desc] )
|
def update_vpnservice(vpnservice, desc, profile=None):
"""
Updates a VPN service
CLI Example:
.. code-block:: bash
salt '*' neutron.update_vpnservice vpnservice-name desc='VPN Service1'
:param vpnservice: ID or name of vpn service to update
:param desc: Set a description for the VPN service
:param profile: Profile to build on (Optional)
:return: Value of updated VPN service information
"""
conn = _auth(profile)
return conn.update_vpnservice(vpnservice, desc)
|
def prompt_yes_or_no(message):
""" prompt_yes_or_no: Prompt user to reply with a y/n response
Args: None
Returns: None
"""
user_input = input("{} [y/n]:".format(message)).lower()
if user_input.startswith("y"):
return True
elif user_input.startswith("n"):
return False
else:
return prompt_yes_or_no(message)
|
def function[prompt_yes_or_no, parameter[message]]:
constant[ prompt_yes_or_no: Prompt user to reply with a y/n response
Args: None
Returns: None
]
variable[user_input] assign[=] call[call[name[input], parameter[call[constant[{} [y/n]:].format, parameter[name[message]]]]].lower, parameter[]]
if call[name[user_input].startswith, parameter[constant[y]]] begin[:]
return[constant[True]]
|
keyword[def] identifier[prompt_yes_or_no] ( identifier[message] ):
literal[string]
identifier[user_input] = identifier[input] ( literal[string] . identifier[format] ( identifier[message] )). identifier[lower] ()
keyword[if] identifier[user_input] . identifier[startswith] ( literal[string] ):
keyword[return] keyword[True]
keyword[elif] identifier[user_input] . identifier[startswith] ( literal[string] ):
keyword[return] keyword[False]
keyword[else] :
keyword[return] identifier[prompt_yes_or_no] ( identifier[message] )
|
def prompt_yes_or_no(message):
""" prompt_yes_or_no: Prompt user to reply with a y/n response
Args: None
Returns: None
"""
user_input = input('{} [y/n]:'.format(message)).lower()
if user_input.startswith('y'):
return True # depends on [control=['if'], data=[]]
elif user_input.startswith('n'):
return False # depends on [control=['if'], data=[]]
else:
return prompt_yes_or_no(message)
|
def stylesheet_declarations(string, is_merc=False, scale=1):
""" Parse a string representing a stylesheet into a list of declarations.
Required boolean is_merc indicates whether the projection should
be interpreted as spherical mercator, so we know what to do with
zoom/scale-denominator in parse_rule().
"""
# everything is display: map by default
display_map = Declaration(Selector(SelectorElement(['*'], [])),
Property('display'), Value('map', False),
(False, (0, 0, 0), (0, 0)))
declarations = [display_map]
tokens = cssTokenizer().tokenize(string)
variables = {}
while True:
try:
for declaration in parse_rule(tokens, variables, [], [], is_merc):
if scale != 1:
declaration.scaleBy(scale)
declarations.append(declaration)
except StopIteration:
break
# sort by a css-like method
return sorted(declarations, key=operator.attrgetter('sort_key'))
|
def function[stylesheet_declarations, parameter[string, is_merc, scale]]:
constant[ Parse a string representing a stylesheet into a list of declarations.
Required boolean is_merc indicates whether the projection should
be interpreted as spherical mercator, so we know what to do with
zoom/scale-denominator in parse_rule().
]
variable[display_map] assign[=] call[name[Declaration], parameter[call[name[Selector], parameter[call[name[SelectorElement], parameter[list[[<ast.Constant object at 0x7da1b27ba770>]], list[[]]]]]], call[name[Property], parameter[constant[display]]], call[name[Value], parameter[constant[map], constant[False]]], tuple[[<ast.Constant object at 0x7da1b27b8fd0>, <ast.Tuple object at 0x7da1b27b93f0>, <ast.Tuple object at 0x7da1b27b8970>]]]]
variable[declarations] assign[=] list[[<ast.Name object at 0x7da1b27bbd30>]]
variable[tokens] assign[=] call[call[name[cssTokenizer], parameter[]].tokenize, parameter[name[string]]]
variable[variables] assign[=] dictionary[[], []]
while constant[True] begin[:]
<ast.Try object at 0x7da1b27b9f90>
return[call[name[sorted], parameter[name[declarations]]]]
|
keyword[def] identifier[stylesheet_declarations] ( identifier[string] , identifier[is_merc] = keyword[False] , identifier[scale] = literal[int] ):
literal[string]
identifier[display_map] = identifier[Declaration] ( identifier[Selector] ( identifier[SelectorElement] ([ literal[string] ],[])),
identifier[Property] ( literal[string] ), identifier[Value] ( literal[string] , keyword[False] ),
( keyword[False] ,( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] )))
identifier[declarations] =[ identifier[display_map] ]
identifier[tokens] = identifier[cssTokenizer] (). identifier[tokenize] ( identifier[string] )
identifier[variables] ={}
keyword[while] keyword[True] :
keyword[try] :
keyword[for] identifier[declaration] keyword[in] identifier[parse_rule] ( identifier[tokens] , identifier[variables] ,[],[], identifier[is_merc] ):
keyword[if] identifier[scale] != literal[int] :
identifier[declaration] . identifier[scaleBy] ( identifier[scale] )
identifier[declarations] . identifier[append] ( identifier[declaration] )
keyword[except] identifier[StopIteration] :
keyword[break]
keyword[return] identifier[sorted] ( identifier[declarations] , identifier[key] = identifier[operator] . identifier[attrgetter] ( literal[string] ))
|
def stylesheet_declarations(string, is_merc=False, scale=1):
""" Parse a string representing a stylesheet into a list of declarations.
Required boolean is_merc indicates whether the projection should
be interpreted as spherical mercator, so we know what to do with
zoom/scale-denominator in parse_rule().
"""
# everything is display: map by default
display_map = Declaration(Selector(SelectorElement(['*'], [])), Property('display'), Value('map', False), (False, (0, 0, 0), (0, 0)))
declarations = [display_map]
tokens = cssTokenizer().tokenize(string)
variables = {}
while True:
try:
for declaration in parse_rule(tokens, variables, [], [], is_merc):
if scale != 1:
declaration.scaleBy(scale) # depends on [control=['if'], data=['scale']]
declarations.append(declaration) # depends on [control=['for'], data=['declaration']] # depends on [control=['try'], data=[]]
except StopIteration:
break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
# sort by a css-like method
return sorted(declarations, key=operator.attrgetter('sort_key'))
|
def _EnsureFileExists(self):
"""Touches a file; returns False on error, True on success."""
if not os.path.exists(self._filename):
old_umask = os.umask(0o177)
try:
open(self._filename, 'a+b').close()
except OSError:
return False
finally:
os.umask(old_umask)
return True
|
def function[_EnsureFileExists, parameter[self]]:
constant[Touches a file; returns False on error, True on success.]
if <ast.UnaryOp object at 0x7da1b07fb8b0> begin[:]
variable[old_umask] assign[=] call[name[os].umask, parameter[constant[127]]]
<ast.Try object at 0x7da1b07fb5b0>
return[constant[True]]
|
keyword[def] identifier[_EnsureFileExists] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[self] . identifier[_filename] ):
identifier[old_umask] = identifier[os] . identifier[umask] ( literal[int] )
keyword[try] :
identifier[open] ( identifier[self] . identifier[_filename] , literal[string] ). identifier[close] ()
keyword[except] identifier[OSError] :
keyword[return] keyword[False]
keyword[finally] :
identifier[os] . identifier[umask] ( identifier[old_umask] )
keyword[return] keyword[True]
|
def _EnsureFileExists(self):
"""Touches a file; returns False on error, True on success."""
if not os.path.exists(self._filename):
old_umask = os.umask(127)
try:
open(self._filename, 'a+b').close() # depends on [control=['try'], data=[]]
except OSError:
return False # depends on [control=['except'], data=[]]
finally:
os.umask(old_umask) # depends on [control=['if'], data=[]]
return True
|
def keep_only_fields(self):
"""Keep only fields listed in field_list."""
for tag in self.record.keys():
if tag not in self.fields_list:
record_delete_fields(self.record, tag)
|
def function[keep_only_fields, parameter[self]]:
constant[Keep only fields listed in field_list.]
for taget[name[tag]] in starred[call[name[self].record.keys, parameter[]]] begin[:]
if compare[name[tag] <ast.NotIn object at 0x7da2590d7190> name[self].fields_list] begin[:]
call[name[record_delete_fields], parameter[name[self].record, name[tag]]]
|
keyword[def] identifier[keep_only_fields] ( identifier[self] ):
literal[string]
keyword[for] identifier[tag] keyword[in] identifier[self] . identifier[record] . identifier[keys] ():
keyword[if] identifier[tag] keyword[not] keyword[in] identifier[self] . identifier[fields_list] :
identifier[record_delete_fields] ( identifier[self] . identifier[record] , identifier[tag] )
|
def keep_only_fields(self):
"""Keep only fields listed in field_list."""
for tag in self.record.keys():
if tag not in self.fields_list:
record_delete_fields(self.record, tag) # depends on [control=['if'], data=['tag']] # depends on [control=['for'], data=['tag']]
|
def content_types(self):
"""
Provides access to content type management methods for content types of an environment.
API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/content-types
:return: :class:`EnvironmentContentTypesProxy <contentful_management.space_content_types_proxy.EnvironmentContentTypesProxy>` object.
:rtype: contentful.space_content_types_proxy.EnvironmentContentTypesProxy
Usage:
>>> space_content_types_proxy = environment.content_types()
<EnvironmentContentTypesProxy space_id="cfexampleapi" environment_id="master">
"""
return EnvironmentContentTypesProxy(self._client, self.space.id, self.id)
|
def function[content_types, parameter[self]]:
constant[
Provides access to content type management methods for content types of an environment.
API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/content-types
:return: :class:`EnvironmentContentTypesProxy <contentful_management.space_content_types_proxy.EnvironmentContentTypesProxy>` object.
:rtype: contentful.space_content_types_proxy.EnvironmentContentTypesProxy
Usage:
>>> space_content_types_proxy = environment.content_types()
<EnvironmentContentTypesProxy space_id="cfexampleapi" environment_id="master">
]
return[call[name[EnvironmentContentTypesProxy], parameter[name[self]._client, name[self].space.id, name[self].id]]]
|
keyword[def] identifier[content_types] ( identifier[self] ):
literal[string]
keyword[return] identifier[EnvironmentContentTypesProxy] ( identifier[self] . identifier[_client] , identifier[self] . identifier[space] . identifier[id] , identifier[self] . identifier[id] )
|
def content_types(self):
"""
Provides access to content type management methods for content types of an environment.
API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/content-types
:return: :class:`EnvironmentContentTypesProxy <contentful_management.space_content_types_proxy.EnvironmentContentTypesProxy>` object.
:rtype: contentful.space_content_types_proxy.EnvironmentContentTypesProxy
Usage:
>>> space_content_types_proxy = environment.content_types()
<EnvironmentContentTypesProxy space_id="cfexampleapi" environment_id="master">
"""
return EnvironmentContentTypesProxy(self._client, self.space.id, self.id)
|
def main(argv=sys.argv[1:], loop=None):
"""Parse argument and setup main program loop."""
args = docopt(__doc__, argv=argv,
version=pkg_resources.require('rflink')[0].version)
level = logging.ERROR
if args['-v']:
level = logging.INFO
if args['-v'] == 2:
level = logging.DEBUG
logging.basicConfig(level=level)
if not loop:
loop = asyncio.get_event_loop()
host = args['--host']
port = args['--port']
baud = args['--baud']
listenport = args['--listenport']
proxy = RFLinkProxy(port=port, host=host, baud=baud, loop=loop)
server_coro = asyncio.start_server(
proxy.client_connected_callback,
host="",
port=listenport,
loop=loop,
)
server = loop.run_until_complete(server_coro)
addr = server.sockets[0].getsockname()
log.info('Serving on %s', addr)
conn_coro = proxy.connect()
loop.run_until_complete(conn_coro)
proxy.closing = False
try:
loop.run_forever()
except KeyboardInterrupt:
proxy.closing = True
# cleanup server
server.close()
loop.run_until_complete(server.wait_closed())
# cleanup server connections
writers = [i[1] for i in list(clients)]
for writer in writers:
writer.close()
if sys.version_info >= (3, 7):
loop.run_until_complete(writer.wait_closed())
# cleanup RFLink connection
proxy.transport.close()
finally:
loop.close()
|
def function[main, parameter[argv, loop]]:
constant[Parse argument and setup main program loop.]
variable[args] assign[=] call[name[docopt], parameter[name[__doc__]]]
variable[level] assign[=] name[logging].ERROR
if call[name[args]][constant[-v]] begin[:]
variable[level] assign[=] name[logging].INFO
if compare[call[name[args]][constant[-v]] equal[==] constant[2]] begin[:]
variable[level] assign[=] name[logging].DEBUG
call[name[logging].basicConfig, parameter[]]
if <ast.UnaryOp object at 0x7da1b054a680> begin[:]
variable[loop] assign[=] call[name[asyncio].get_event_loop, parameter[]]
variable[host] assign[=] call[name[args]][constant[--host]]
variable[port] assign[=] call[name[args]][constant[--port]]
variable[baud] assign[=] call[name[args]][constant[--baud]]
variable[listenport] assign[=] call[name[args]][constant[--listenport]]
variable[proxy] assign[=] call[name[RFLinkProxy], parameter[]]
variable[server_coro] assign[=] call[name[asyncio].start_server, parameter[name[proxy].client_connected_callback]]
variable[server] assign[=] call[name[loop].run_until_complete, parameter[name[server_coro]]]
variable[addr] assign[=] call[call[name[server].sockets][constant[0]].getsockname, parameter[]]
call[name[log].info, parameter[constant[Serving on %s], name[addr]]]
variable[conn_coro] assign[=] call[name[proxy].connect, parameter[]]
call[name[loop].run_until_complete, parameter[name[conn_coro]]]
name[proxy].closing assign[=] constant[False]
<ast.Try object at 0x7da1b031caf0>
|
keyword[def] identifier[main] ( identifier[argv] = identifier[sys] . identifier[argv] [ literal[int] :], identifier[loop] = keyword[None] ):
literal[string]
identifier[args] = identifier[docopt] ( identifier[__doc__] , identifier[argv] = identifier[argv] ,
identifier[version] = identifier[pkg_resources] . identifier[require] ( literal[string] )[ literal[int] ]. identifier[version] )
identifier[level] = identifier[logging] . identifier[ERROR]
keyword[if] identifier[args] [ literal[string] ]:
identifier[level] = identifier[logging] . identifier[INFO]
keyword[if] identifier[args] [ literal[string] ]== literal[int] :
identifier[level] = identifier[logging] . identifier[DEBUG]
identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[level] )
keyword[if] keyword[not] identifier[loop] :
identifier[loop] = identifier[asyncio] . identifier[get_event_loop] ()
identifier[host] = identifier[args] [ literal[string] ]
identifier[port] = identifier[args] [ literal[string] ]
identifier[baud] = identifier[args] [ literal[string] ]
identifier[listenport] = identifier[args] [ literal[string] ]
identifier[proxy] = identifier[RFLinkProxy] ( identifier[port] = identifier[port] , identifier[host] = identifier[host] , identifier[baud] = identifier[baud] , identifier[loop] = identifier[loop] )
identifier[server_coro] = identifier[asyncio] . identifier[start_server] (
identifier[proxy] . identifier[client_connected_callback] ,
identifier[host] = literal[string] ,
identifier[port] = identifier[listenport] ,
identifier[loop] = identifier[loop] ,
)
identifier[server] = identifier[loop] . identifier[run_until_complete] ( identifier[server_coro] )
identifier[addr] = identifier[server] . identifier[sockets] [ literal[int] ]. identifier[getsockname] ()
identifier[log] . identifier[info] ( literal[string] , identifier[addr] )
identifier[conn_coro] = identifier[proxy] . identifier[connect] ()
identifier[loop] . identifier[run_until_complete] ( identifier[conn_coro] )
identifier[proxy] . identifier[closing] = keyword[False]
keyword[try] :
identifier[loop] . identifier[run_forever] ()
keyword[except] identifier[KeyboardInterrupt] :
identifier[proxy] . identifier[closing] = keyword[True]
identifier[server] . identifier[close] ()
identifier[loop] . identifier[run_until_complete] ( identifier[server] . identifier[wait_closed] ())
identifier[writers] =[ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[list] ( identifier[clients] )]
keyword[for] identifier[writer] keyword[in] identifier[writers] :
identifier[writer] . identifier[close] ()
keyword[if] identifier[sys] . identifier[version_info] >=( literal[int] , literal[int] ):
identifier[loop] . identifier[run_until_complete] ( identifier[writer] . identifier[wait_closed] ())
identifier[proxy] . identifier[transport] . identifier[close] ()
keyword[finally] :
identifier[loop] . identifier[close] ()
|
def main(argv=sys.argv[1:], loop=None):
"""Parse argument and setup main program loop."""
args = docopt(__doc__, argv=argv, version=pkg_resources.require('rflink')[0].version)
level = logging.ERROR
if args['-v']:
level = logging.INFO # depends on [control=['if'], data=[]]
if args['-v'] == 2:
level = logging.DEBUG # depends on [control=['if'], data=[]]
logging.basicConfig(level=level)
if not loop:
loop = asyncio.get_event_loop() # depends on [control=['if'], data=[]]
host = args['--host']
port = args['--port']
baud = args['--baud']
listenport = args['--listenport']
proxy = RFLinkProxy(port=port, host=host, baud=baud, loop=loop)
server_coro = asyncio.start_server(proxy.client_connected_callback, host='', port=listenport, loop=loop)
server = loop.run_until_complete(server_coro)
addr = server.sockets[0].getsockname()
log.info('Serving on %s', addr)
conn_coro = proxy.connect()
loop.run_until_complete(conn_coro)
proxy.closing = False
try:
loop.run_forever() # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
proxy.closing = True
# cleanup server
server.close()
loop.run_until_complete(server.wait_closed())
# cleanup server connections
writers = [i[1] for i in list(clients)]
for writer in writers:
writer.close()
if sys.version_info >= (3, 7):
loop.run_until_complete(writer.wait_closed()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['writer']]
# cleanup RFLink connection
proxy.transport.close() # depends on [control=['except'], data=[]]
finally:
loop.close()
|
def async_stats_job_data(klass, account, url, **kwargs):
"""
Returns the results of the specified async job IDs
"""
resource = urlparse(url)
domain = '{0}://{1}'.format(resource.scheme, resource.netloc)
response = Request(account.client, 'get', resource.path, domain=domain,
raw_body=True, stream=True).perform()
return response.body
|
def function[async_stats_job_data, parameter[klass, account, url]]:
constant[
Returns the results of the specified async job IDs
]
variable[resource] assign[=] call[name[urlparse], parameter[name[url]]]
variable[domain] assign[=] call[constant[{0}://{1}].format, parameter[name[resource].scheme, name[resource].netloc]]
variable[response] assign[=] call[call[name[Request], parameter[name[account].client, constant[get], name[resource].path]].perform, parameter[]]
return[name[response].body]
|
keyword[def] identifier[async_stats_job_data] ( identifier[klass] , identifier[account] , identifier[url] ,** identifier[kwargs] ):
literal[string]
identifier[resource] = identifier[urlparse] ( identifier[url] )
identifier[domain] = literal[string] . identifier[format] ( identifier[resource] . identifier[scheme] , identifier[resource] . identifier[netloc] )
identifier[response] = identifier[Request] ( identifier[account] . identifier[client] , literal[string] , identifier[resource] . identifier[path] , identifier[domain] = identifier[domain] ,
identifier[raw_body] = keyword[True] , identifier[stream] = keyword[True] ). identifier[perform] ()
keyword[return] identifier[response] . identifier[body]
|
def async_stats_job_data(klass, account, url, **kwargs):
"""
Returns the results of the specified async job IDs
"""
resource = urlparse(url)
domain = '{0}://{1}'.format(resource.scheme, resource.netloc)
response = Request(account.client, 'get', resource.path, domain=domain, raw_body=True, stream=True).perform()
return response.body
|
def contains_unquoted_target(x: str,
quote: str = '"', target: str = '&') -> bool:
"""
Checks if ``target`` exists in ``x`` outside quotes (as defined by
``quote``). Principal use: from
:func:`contains_unquoted_ampersand_dangerous_to_windows`.
"""
in_quote = False
for c in x:
if c == quote:
in_quote = not in_quote
elif c == target:
if not in_quote:
return True
return False
|
def function[contains_unquoted_target, parameter[x, quote, target]]:
constant[
Checks if ``target`` exists in ``x`` outside quotes (as defined by
``quote``). Principal use: from
:func:`contains_unquoted_ampersand_dangerous_to_windows`.
]
variable[in_quote] assign[=] constant[False]
for taget[name[c]] in starred[name[x]] begin[:]
if compare[name[c] equal[==] name[quote]] begin[:]
variable[in_quote] assign[=] <ast.UnaryOp object at 0x7da1b170bac0>
return[constant[False]]
|
keyword[def] identifier[contains_unquoted_target] ( identifier[x] : identifier[str] ,
identifier[quote] : identifier[str] = literal[string] , identifier[target] : identifier[str] = literal[string] )-> identifier[bool] :
literal[string]
identifier[in_quote] = keyword[False]
keyword[for] identifier[c] keyword[in] identifier[x] :
keyword[if] identifier[c] == identifier[quote] :
identifier[in_quote] = keyword[not] identifier[in_quote]
keyword[elif] identifier[c] == identifier[target] :
keyword[if] keyword[not] identifier[in_quote] :
keyword[return] keyword[True]
keyword[return] keyword[False]
|
def contains_unquoted_target(x: str, quote: str='"', target: str='&') -> bool:
"""
Checks if ``target`` exists in ``x`` outside quotes (as defined by
``quote``). Principal use: from
:func:`contains_unquoted_ampersand_dangerous_to_windows`.
"""
in_quote = False
for c in x:
if c == quote:
in_quote = not in_quote # depends on [control=['if'], data=[]]
elif c == target:
if not in_quote:
return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']]
return False
|
def _after_indentation(res, options=None, fpath=''):
""" _after_indentation(res : dict):
Called after the string has been indented appropriately.
It takes care of writing the file and checking for unclosed strings
or comments.
"""
fname = os.path.basename(fpath)
opts = parse_options(options)
for msg in res['message_stack']:
if opts.warning:
if opts.files:
msg['fname'] = fname
sys.stderr.write('\n{fname}:{line}:{column}: {msg}'.format(**msg))
else:
# Input was passed through stdin
sys.stderr.write('\n:{line}:{column}: {msg}'.format(**msg))
if res['bracket_locations']:
# If the bracket_locations list is not empty it means that there are some
# brackets(opening) that haven't been closed.
for bracket in res['bracket_locations']:
line = bracket['line_number']
column = bracket['bracket_pos']
character = bracket['character']
# The bracket_locations are not very accurate. The warning might be
# misleading because it considers round and square brackets to be
# the same.
message = "\n%s:%d:%d: Unmatched `%s'"
if opts.warning:
sys.stderr.write(message % (fname, line, column, character))
if res['newlisp_brace_locations']:
for brace in res['newlisp_brace_locations']:
message = "\n%s:%d:%d: Unclosed newLISP brace string"
if opts.warning:
sys.stderr.write(message % (fname, brace[0], brace[1]))
if res['comment_locations']:
for comment in res['comment_locations']:
message = "\n%s:%d:%d: Unclosed multiline comment"
tpl = (fname,) + comment
if opts.warning:
sys.stderr.write(message % tpl)
if res['last_symbol_location']:
message = "\n%s:%d:%d: Unclosed symbol"
tpl = (fname,) + res['last_symbol_location']
if opts.warning:
sys.stderr.write(message % tpl)
if res['in_string']:
message = "\n%s:%d:%d: String extends to end-of-file"
tpl = (fname,) + res['last_quote_location']
if opts.warning:
sys.stderr.write(message % tpl)
if res['in_newlisp_tag_string']:
message = "\n%s:%d:%d: Tag string extends to end-of-file"
tpl = (fname,) + res['first_tag_string']
if opts.warning:
sys.stderr.write(message % tpl)
output_file = opts.output_file
if not output_file:
output_file = fpath
indented_code = res['indented_code']
indent_result = ''.join(indented_code)
if indented_code == res['original_code'] and opts.files:
message = "\nFile `%s' has already been formatted. Leaving it unchanged. . .\n"
sys.stderr.write(message % fname)
if output_file != fpath:
with open(output_file, 'wb') as indented_file:
indented_file.write(indent_result.encode('utf8'))
else:
if opts.output_diff:
diff = difflib.unified_diff(res['original_code'], indented_code, n=5)
if opts.colour_diff:
colour_diff(diff)
else:
print(''.join(list(diff)))
elif opts.output:
print(indent_result, end='')
if opts.modify:
# write in binary mode to preserve the original line ending
with open(output_file, 'wb') as indented_file:
indented_file.write(indent_result.encode('utf8'))
|
def function[_after_indentation, parameter[res, options, fpath]]:
constant[ _after_indentation(res : dict):
Called after the string has been indented appropriately.
It takes care of writing the file and checking for unclosed strings
or comments.
]
variable[fname] assign[=] call[name[os].path.basename, parameter[name[fpath]]]
variable[opts] assign[=] call[name[parse_options], parameter[name[options]]]
for taget[name[msg]] in starred[call[name[res]][constant[message_stack]]] begin[:]
if name[opts].warning begin[:]
if name[opts].files begin[:]
call[name[msg]][constant[fname]] assign[=] name[fname]
call[name[sys].stderr.write, parameter[call[constant[
{fname}:{line}:{column}: {msg}].format, parameter[]]]]
if call[name[res]][constant[bracket_locations]] begin[:]
for taget[name[bracket]] in starred[call[name[res]][constant[bracket_locations]]] begin[:]
variable[line] assign[=] call[name[bracket]][constant[line_number]]
variable[column] assign[=] call[name[bracket]][constant[bracket_pos]]
variable[character] assign[=] call[name[bracket]][constant[character]]
variable[message] assign[=] constant[
%s:%d:%d: Unmatched `%s']
if name[opts].warning begin[:]
call[name[sys].stderr.write, parameter[binary_operation[name[message] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b14728c0>, <ast.Name object at 0x7da1b1471ab0>, <ast.Name object at 0x7da1b1470c10>, <ast.Name object at 0x7da1b14717e0>]]]]]
if call[name[res]][constant[newlisp_brace_locations]] begin[:]
for taget[name[brace]] in starred[call[name[res]][constant[newlisp_brace_locations]]] begin[:]
variable[message] assign[=] constant[
%s:%d:%d: Unclosed newLISP brace string]
if name[opts].warning begin[:]
call[name[sys].stderr.write, parameter[binary_operation[name[message] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b14729b0>, <ast.Subscript object at 0x7da1b1473130>, <ast.Subscript object at 0x7da1b14700d0>]]]]]
if call[name[res]][constant[comment_locations]] begin[:]
for taget[name[comment]] in starred[call[name[res]][constant[comment_locations]]] begin[:]
variable[message] assign[=] constant[
%s:%d:%d: Unclosed multiline comment]
variable[tpl] assign[=] binary_operation[tuple[[<ast.Name object at 0x7da1b1472ec0>]] + name[comment]]
if name[opts].warning begin[:]
call[name[sys].stderr.write, parameter[binary_operation[name[message] <ast.Mod object at 0x7da2590d6920> name[tpl]]]]
if call[name[res]][constant[last_symbol_location]] begin[:]
variable[message] assign[=] constant[
%s:%d:%d: Unclosed symbol]
variable[tpl] assign[=] binary_operation[tuple[[<ast.Name object at 0x7da1b14702e0>]] + call[name[res]][constant[last_symbol_location]]]
if name[opts].warning begin[:]
call[name[sys].stderr.write, parameter[binary_operation[name[message] <ast.Mod object at 0x7da2590d6920> name[tpl]]]]
if call[name[res]][constant[in_string]] begin[:]
variable[message] assign[=] constant[
%s:%d:%d: String extends to end-of-file]
variable[tpl] assign[=] binary_operation[tuple[[<ast.Name object at 0x7da1b1470e80>]] + call[name[res]][constant[last_quote_location]]]
if name[opts].warning begin[:]
call[name[sys].stderr.write, parameter[binary_operation[name[message] <ast.Mod object at 0x7da2590d6920> name[tpl]]]]
if call[name[res]][constant[in_newlisp_tag_string]] begin[:]
variable[message] assign[=] constant[
%s:%d:%d: Tag string extends to end-of-file]
variable[tpl] assign[=] binary_operation[tuple[[<ast.Name object at 0x7da1b1472bf0>]] + call[name[res]][constant[first_tag_string]]]
if name[opts].warning begin[:]
call[name[sys].stderr.write, parameter[binary_operation[name[message] <ast.Mod object at 0x7da2590d6920> name[tpl]]]]
variable[output_file] assign[=] name[opts].output_file
if <ast.UnaryOp object at 0x7da1b1472d40> begin[:]
variable[output_file] assign[=] name[fpath]
variable[indented_code] assign[=] call[name[res]][constant[indented_code]]
variable[indent_result] assign[=] call[constant[].join, parameter[name[indented_code]]]
if <ast.BoolOp object at 0x7da1b1472020> begin[:]
variable[message] assign[=] constant[
File `%s' has already been formatted. Leaving it unchanged. . .
]
call[name[sys].stderr.write, parameter[binary_operation[name[message] <ast.Mod object at 0x7da2590d6920> name[fname]]]]
if compare[name[output_file] not_equal[!=] name[fpath]] begin[:]
with call[name[open], parameter[name[output_file], constant[wb]]] begin[:]
call[name[indented_file].write, parameter[call[name[indent_result].encode, parameter[constant[utf8]]]]]
|
keyword[def] identifier[_after_indentation] ( identifier[res] , identifier[options] = keyword[None] , identifier[fpath] = literal[string] ):
literal[string]
identifier[fname] = identifier[os] . identifier[path] . identifier[basename] ( identifier[fpath] )
identifier[opts] = identifier[parse_options] ( identifier[options] )
keyword[for] identifier[msg] keyword[in] identifier[res] [ literal[string] ]:
keyword[if] identifier[opts] . identifier[warning] :
keyword[if] identifier[opts] . identifier[files] :
identifier[msg] [ literal[string] ]= identifier[fname]
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] . identifier[format] (** identifier[msg] ))
keyword[else] :
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] . identifier[format] (** identifier[msg] ))
keyword[if] identifier[res] [ literal[string] ]:
keyword[for] identifier[bracket] keyword[in] identifier[res] [ literal[string] ]:
identifier[line] = identifier[bracket] [ literal[string] ]
identifier[column] = identifier[bracket] [ literal[string] ]
identifier[character] = identifier[bracket] [ literal[string] ]
identifier[message] = literal[string]
keyword[if] identifier[opts] . identifier[warning] :
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[message] %( identifier[fname] , identifier[line] , identifier[column] , identifier[character] ))
keyword[if] identifier[res] [ literal[string] ]:
keyword[for] identifier[brace] keyword[in] identifier[res] [ literal[string] ]:
identifier[message] = literal[string]
keyword[if] identifier[opts] . identifier[warning] :
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[message] %( identifier[fname] , identifier[brace] [ literal[int] ], identifier[brace] [ literal[int] ]))
keyword[if] identifier[res] [ literal[string] ]:
keyword[for] identifier[comment] keyword[in] identifier[res] [ literal[string] ]:
identifier[message] = literal[string]
identifier[tpl] =( identifier[fname] ,)+ identifier[comment]
keyword[if] identifier[opts] . identifier[warning] :
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[message] % identifier[tpl] )
keyword[if] identifier[res] [ literal[string] ]:
identifier[message] = literal[string]
identifier[tpl] =( identifier[fname] ,)+ identifier[res] [ literal[string] ]
keyword[if] identifier[opts] . identifier[warning] :
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[message] % identifier[tpl] )
keyword[if] identifier[res] [ literal[string] ]:
identifier[message] = literal[string]
identifier[tpl] =( identifier[fname] ,)+ identifier[res] [ literal[string] ]
keyword[if] identifier[opts] . identifier[warning] :
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[message] % identifier[tpl] )
keyword[if] identifier[res] [ literal[string] ]:
identifier[message] = literal[string]
identifier[tpl] =( identifier[fname] ,)+ identifier[res] [ literal[string] ]
keyword[if] identifier[opts] . identifier[warning] :
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[message] % identifier[tpl] )
identifier[output_file] = identifier[opts] . identifier[output_file]
keyword[if] keyword[not] identifier[output_file] :
identifier[output_file] = identifier[fpath]
identifier[indented_code] = identifier[res] [ literal[string] ]
identifier[indent_result] = literal[string] . identifier[join] ( identifier[indented_code] )
keyword[if] identifier[indented_code] == identifier[res] [ literal[string] ] keyword[and] identifier[opts] . identifier[files] :
identifier[message] = literal[string]
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[message] % identifier[fname] )
keyword[if] identifier[output_file] != identifier[fpath] :
keyword[with] identifier[open] ( identifier[output_file] , literal[string] ) keyword[as] identifier[indented_file] :
identifier[indented_file] . identifier[write] ( identifier[indent_result] . identifier[encode] ( literal[string] ))
keyword[else] :
keyword[if] identifier[opts] . identifier[output_diff] :
identifier[diff] = identifier[difflib] . identifier[unified_diff] ( identifier[res] [ literal[string] ], identifier[indented_code] , identifier[n] = literal[int] )
keyword[if] identifier[opts] . identifier[colour_diff] :
identifier[colour_diff] ( identifier[diff] )
keyword[else] :
identifier[print] ( literal[string] . identifier[join] ( identifier[list] ( identifier[diff] )))
keyword[elif] identifier[opts] . identifier[output] :
identifier[print] ( identifier[indent_result] , identifier[end] = literal[string] )
keyword[if] identifier[opts] . identifier[modify] :
keyword[with] identifier[open] ( identifier[output_file] , literal[string] ) keyword[as] identifier[indented_file] :
identifier[indented_file] . identifier[write] ( identifier[indent_result] . identifier[encode] ( literal[string] ))
|
def _after_indentation(res, options=None, fpath=''):
""" _after_indentation(res : dict):
Called after the string has been indented appropriately.
It takes care of writing the file and checking for unclosed strings
or comments.
"""
fname = os.path.basename(fpath)
opts = parse_options(options)
for msg in res['message_stack']:
if opts.warning:
if opts.files:
msg['fname'] = fname
sys.stderr.write('\n{fname}:{line}:{column}: {msg}'.format(**msg)) # depends on [control=['if'], data=[]]
else:
# Input was passed through stdin
sys.stderr.write('\n:{line}:{column}: {msg}'.format(**msg)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['msg']]
if res['bracket_locations']:
# If the bracket_locations list is not empty it means that there are some
# brackets(opening) that haven't been closed.
for bracket in res['bracket_locations']:
line = bracket['line_number']
column = bracket['bracket_pos']
character = bracket['character']
# The bracket_locations are not very accurate. The warning might be
# misleading because it considers round and square brackets to be
# the same.
message = "\n%s:%d:%d: Unmatched `%s'"
if opts.warning:
sys.stderr.write(message % (fname, line, column, character)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['bracket']] # depends on [control=['if'], data=[]]
if res['newlisp_brace_locations']:
for brace in res['newlisp_brace_locations']:
message = '\n%s:%d:%d: Unclosed newLISP brace string'
if opts.warning:
sys.stderr.write(message % (fname, brace[0], brace[1])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['brace']] # depends on [control=['if'], data=[]]
if res['comment_locations']:
for comment in res['comment_locations']:
message = '\n%s:%d:%d: Unclosed multiline comment'
tpl = (fname,) + comment
if opts.warning:
sys.stderr.write(message % tpl) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['comment']] # depends on [control=['if'], data=[]]
if res['last_symbol_location']:
message = '\n%s:%d:%d: Unclosed symbol'
tpl = (fname,) + res['last_symbol_location']
if opts.warning:
sys.stderr.write(message % tpl) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if res['in_string']:
message = '\n%s:%d:%d: String extends to end-of-file'
tpl = (fname,) + res['last_quote_location']
if opts.warning:
sys.stderr.write(message % tpl) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if res['in_newlisp_tag_string']:
message = '\n%s:%d:%d: Tag string extends to end-of-file'
tpl = (fname,) + res['first_tag_string']
if opts.warning:
sys.stderr.write(message % tpl) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
output_file = opts.output_file
if not output_file:
output_file = fpath # depends on [control=['if'], data=[]]
indented_code = res['indented_code']
indent_result = ''.join(indented_code)
if indented_code == res['original_code'] and opts.files:
message = "\nFile `%s' has already been formatted. Leaving it unchanged. . .\n"
sys.stderr.write(message % fname)
if output_file != fpath:
with open(output_file, 'wb') as indented_file:
indented_file.write(indent_result.encode('utf8')) # depends on [control=['with'], data=['indented_file']] # depends on [control=['if'], data=['output_file']] # depends on [control=['if'], data=[]]
else:
if opts.output_diff:
diff = difflib.unified_diff(res['original_code'], indented_code, n=5)
if opts.colour_diff:
colour_diff(diff) # depends on [control=['if'], data=[]]
else:
print(''.join(list(diff))) # depends on [control=['if'], data=[]]
elif opts.output:
print(indent_result, end='') # depends on [control=['if'], data=[]]
if opts.modify:
# write in binary mode to preserve the original line ending
with open(output_file, 'wb') as indented_file:
indented_file.write(indent_result.encode('utf8')) # depends on [control=['with'], data=['indented_file']] # depends on [control=['if'], data=[]]
|
def get_network_settings():
'''
Return the contents of the global network script.
CLI Example:
.. code-block:: bash
salt '*' ip.get_network_settings
'''
if __grains__['lsb_distrib_id'] == 'nilrt':
raise salt.exceptions.CommandExecutionError('Not supported in this version.')
settings = []
networking = 'no' if _get_state() == 'offline' else 'yes'
settings.append('networking={0}'.format(networking))
hostname = __salt__['network.get_hostname']
settings.append('hostname={0}'.format(hostname))
return settings
|
def function[get_network_settings, parameter[]]:
constant[
Return the contents of the global network script.
CLI Example:
.. code-block:: bash
salt '*' ip.get_network_settings
]
if compare[call[name[__grains__]][constant[lsb_distrib_id]] equal[==] constant[nilrt]] begin[:]
<ast.Raise object at 0x7da18bc72e90>
variable[settings] assign[=] list[[]]
variable[networking] assign[=] <ast.IfExp object at 0x7da18bc71000>
call[name[settings].append, parameter[call[constant[networking={0}].format, parameter[name[networking]]]]]
variable[hostname] assign[=] call[name[__salt__]][constant[network.get_hostname]]
call[name[settings].append, parameter[call[constant[hostname={0}].format, parameter[name[hostname]]]]]
return[name[settings]]
|
keyword[def] identifier[get_network_settings] ():
literal[string]
keyword[if] identifier[__grains__] [ literal[string] ]== literal[string] :
keyword[raise] identifier[salt] . identifier[exceptions] . identifier[CommandExecutionError] ( literal[string] )
identifier[settings] =[]
identifier[networking] = literal[string] keyword[if] identifier[_get_state] ()== literal[string] keyword[else] literal[string]
identifier[settings] . identifier[append] ( literal[string] . identifier[format] ( identifier[networking] ))
identifier[hostname] = identifier[__salt__] [ literal[string] ]
identifier[settings] . identifier[append] ( literal[string] . identifier[format] ( identifier[hostname] ))
keyword[return] identifier[settings]
|
def get_network_settings():
"""
Return the contents of the global network script.
CLI Example:
.. code-block:: bash
salt '*' ip.get_network_settings
"""
if __grains__['lsb_distrib_id'] == 'nilrt':
raise salt.exceptions.CommandExecutionError('Not supported in this version.') # depends on [control=['if'], data=[]]
settings = []
networking = 'no' if _get_state() == 'offline' else 'yes'
settings.append('networking={0}'.format(networking))
hostname = __salt__['network.get_hostname']
settings.append('hostname={0}'.format(hostname))
return settings
|
def build_db():
"""Get a structured dataset out of
http://download.geonames.org/export/dump/cities1000.txt
"""
if not os.path.exists(MISC_PATH):
os.makedirs(MISC_PATH)
cities_path = os.path.join(MISC_PATH, "cities1000.txt")
cities_msgpack = os.path.join(MISC_PATH, "cities1000.bin")
if not os.path.isfile(cities_path):
download()
if not os.path.isfile(cities_msgpack):
with open(cities_path) as f:
lines = f.readlines()
db = []
for i, line in enumerate(lines):
l = line.strip().split('\t')
db.append({'latitude': float(l[4]),
'longitude': float(l[5]),
'linenr': i})
packed = msgpack.packb(db, use_bin_type=True)
with open(cities_msgpack, 'wb') as f:
f.write(packed)
else:
with open(cities_msgpack, 'rb') as f:
content = f.read()
db = msgpack.unpackb(content)
return db
|
def function[build_db, parameter[]]:
constant[Get a structured dataset out of
http://download.geonames.org/export/dump/cities1000.txt
]
if <ast.UnaryOp object at 0x7da1b149c5e0> begin[:]
call[name[os].makedirs, parameter[name[MISC_PATH]]]
variable[cities_path] assign[=] call[name[os].path.join, parameter[name[MISC_PATH], constant[cities1000.txt]]]
variable[cities_msgpack] assign[=] call[name[os].path.join, parameter[name[MISC_PATH], constant[cities1000.bin]]]
if <ast.UnaryOp object at 0x7da1b149f940> begin[:]
call[name[download], parameter[]]
if <ast.UnaryOp object at 0x7da1b149f7f0> begin[:]
with call[name[open], parameter[name[cities_path]]] begin[:]
variable[lines] assign[=] call[name[f].readlines, parameter[]]
variable[db] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b14c5120>, <ast.Name object at 0x7da1b14c4910>]]] in starred[call[name[enumerate], parameter[name[lines]]]] begin[:]
variable[l] assign[=] call[call[name[line].strip, parameter[]].split, parameter[constant[ ]]]
call[name[db].append, parameter[dictionary[[<ast.Constant object at 0x7da1b14c4ac0>, <ast.Constant object at 0x7da1b14c6c20>, <ast.Constant object at 0x7da1b14c7df0>], [<ast.Call object at 0x7da1b14c6c80>, <ast.Call object at 0x7da1b14c4d00>, <ast.Name object at 0x7da1b14c5270>]]]]
variable[packed] assign[=] call[name[msgpack].packb, parameter[name[db]]]
with call[name[open], parameter[name[cities_msgpack], constant[wb]]] begin[:]
call[name[f].write, parameter[name[packed]]]
return[name[db]]
|
keyword[def] identifier[build_db] ():
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[MISC_PATH] ):
identifier[os] . identifier[makedirs] ( identifier[MISC_PATH] )
identifier[cities_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[MISC_PATH] , literal[string] )
identifier[cities_msgpack] = identifier[os] . identifier[path] . identifier[join] ( identifier[MISC_PATH] , literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[cities_path] ):
identifier[download] ()
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[cities_msgpack] ):
keyword[with] identifier[open] ( identifier[cities_path] ) keyword[as] identifier[f] :
identifier[lines] = identifier[f] . identifier[readlines] ()
identifier[db] =[]
keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[lines] ):
identifier[l] = identifier[line] . identifier[strip] (). identifier[split] ( literal[string] )
identifier[db] . identifier[append] ({ literal[string] : identifier[float] ( identifier[l] [ literal[int] ]),
literal[string] : identifier[float] ( identifier[l] [ literal[int] ]),
literal[string] : identifier[i] })
identifier[packed] = identifier[msgpack] . identifier[packb] ( identifier[db] , identifier[use_bin_type] = keyword[True] )
keyword[with] identifier[open] ( identifier[cities_msgpack] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[packed] )
keyword[else] :
keyword[with] identifier[open] ( identifier[cities_msgpack] , literal[string] ) keyword[as] identifier[f] :
identifier[content] = identifier[f] . identifier[read] ()
identifier[db] = identifier[msgpack] . identifier[unpackb] ( identifier[content] )
keyword[return] identifier[db]
|
def build_db():
"""Get a structured dataset out of
http://download.geonames.org/export/dump/cities1000.txt
"""
if not os.path.exists(MISC_PATH):
os.makedirs(MISC_PATH) # depends on [control=['if'], data=[]]
cities_path = os.path.join(MISC_PATH, 'cities1000.txt')
cities_msgpack = os.path.join(MISC_PATH, 'cities1000.bin')
if not os.path.isfile(cities_path):
download() # depends on [control=['if'], data=[]]
if not os.path.isfile(cities_msgpack):
with open(cities_path) as f:
lines = f.readlines() # depends on [control=['with'], data=['f']]
db = []
for (i, line) in enumerate(lines):
l = line.strip().split('\t')
db.append({'latitude': float(l[4]), 'longitude': float(l[5]), 'linenr': i}) # depends on [control=['for'], data=[]]
packed = msgpack.packb(db, use_bin_type=True)
with open(cities_msgpack, 'wb') as f:
f.write(packed) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
else:
with open(cities_msgpack, 'rb') as f:
content = f.read() # depends on [control=['with'], data=['f']]
db = msgpack.unpackb(content)
return db
|
def __undo_filter_average(self, scanline):
"""Undo average filter."""
ai = -self.fu
previous = self.prev
for i in range(len(scanline)):
x = scanline[i]
if ai < 0:
a = 0
else:
a = scanline[ai] # result
b = previous[i]
scanline[i] = (x + ((a + b) >> 1)) & 0xff # result
ai += 1
|
def function[__undo_filter_average, parameter[self, scanline]]:
constant[Undo average filter.]
variable[ai] assign[=] <ast.UnaryOp object at 0x7da18f00e5c0>
variable[previous] assign[=] name[self].prev
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[scanline]]]]]] begin[:]
variable[x] assign[=] call[name[scanline]][name[i]]
if compare[name[ai] less[<] constant[0]] begin[:]
variable[a] assign[=] constant[0]
variable[b] assign[=] call[name[previous]][name[i]]
call[name[scanline]][name[i]] assign[=] binary_operation[binary_operation[name[x] + binary_operation[binary_operation[name[a] + name[b]] <ast.RShift object at 0x7da2590d6a40> constant[1]]] <ast.BitAnd object at 0x7da2590d6b60> constant[255]]
<ast.AugAssign object at 0x7da2045661a0>
|
keyword[def] identifier[__undo_filter_average] ( identifier[self] , identifier[scanline] ):
literal[string]
identifier[ai] =- identifier[self] . identifier[fu]
identifier[previous] = identifier[self] . identifier[prev]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[scanline] )):
identifier[x] = identifier[scanline] [ identifier[i] ]
keyword[if] identifier[ai] < literal[int] :
identifier[a] = literal[int]
keyword[else] :
identifier[a] = identifier[scanline] [ identifier[ai] ]
identifier[b] = identifier[previous] [ identifier[i] ]
identifier[scanline] [ identifier[i] ]=( identifier[x] +(( identifier[a] + identifier[b] )>> literal[int] ))& literal[int]
identifier[ai] += literal[int]
|
def __undo_filter_average(self, scanline):
"""Undo average filter."""
ai = -self.fu
previous = self.prev
for i in range(len(scanline)):
x = scanline[i]
if ai < 0:
a = 0 # depends on [control=['if'], data=[]]
else:
a = scanline[ai] # result
b = previous[i]
scanline[i] = x + (a + b >> 1) & 255 # result
ai += 1 # depends on [control=['for'], data=['i']]
|
def commit(self):
"""
Commit transaction which is currently in progress.
"""
self._assert_open()
if self._autocommit:
return
if not self._conn.tds72_transaction:
return
self._main_cursor._commit(cont=True, isolation_level=self._isolation_level)
|
def function[commit, parameter[self]]:
constant[
Commit transaction which is currently in progress.
]
call[name[self]._assert_open, parameter[]]
if name[self]._autocommit begin[:]
return[None]
if <ast.UnaryOp object at 0x7da1b0579000> begin[:]
return[None]
call[name[self]._main_cursor._commit, parameter[]]
|
keyword[def] identifier[commit] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_assert_open] ()
keyword[if] identifier[self] . identifier[_autocommit] :
keyword[return]
keyword[if] keyword[not] identifier[self] . identifier[_conn] . identifier[tds72_transaction] :
keyword[return]
identifier[self] . identifier[_main_cursor] . identifier[_commit] ( identifier[cont] = keyword[True] , identifier[isolation_level] = identifier[self] . identifier[_isolation_level] )
|
def commit(self):
"""
Commit transaction which is currently in progress.
"""
self._assert_open()
if self._autocommit:
return # depends on [control=['if'], data=[]]
if not self._conn.tds72_transaction:
return # depends on [control=['if'], data=[]]
self._main_cursor._commit(cont=True, isolation_level=self._isolation_level)
|
def _cache(self, func, func_memory_level=1, **kwargs):
""" Return a joblib.Memory object.
The memory_level determines the level above which the wrapped
function output is cached. By specifying a numeric value for
this level, the user can to control the amount of cache memory
used. This function will cache the function call or not
depending on the cache level.
Parameters
----------
func: function
The function the output of which is to be cached.
memory_level: int
The memory_level from which caching must be enabled for the wrapped
function.
Returns
-------
mem: joblib.Memory
object that wraps the function func. This object may be
a no-op, if the requested level is lower than the value given
to _cache()). For consistency, a joblib.Memory object is always
returned.
"""
verbose = getattr(self, 'verbose', 0)
# Creates attributes if they don't exist
# This is to make creating them in __init__() optional.
if not hasattr(self, "memory_level"):
self.memory_level = 0
if not hasattr(self, "memory"):
self.memory = Memory(cachedir=None, verbose=verbose)
if isinstance(self.memory, _basestring):
self.memory = Memory(cachedir=self.memory, verbose=verbose)
# If cache level is 0 but a memory object has been provided, set
# memory_level to 1 with a warning.
if self.memory_level == 0:
if (isinstance(self.memory, _basestring)
or self.memory.cachedir is not None):
warnings.warn("memory_level is currently set to 0 but "
"a Memory object has been provided. "
"Setting memory_level to 1.")
self.memory_level = 1
return cache(func, self.memory, func_memory_level=func_memory_level,
memory_level=self.memory_level, **kwargs)
|
def function[_cache, parameter[self, func, func_memory_level]]:
constant[ Return a joblib.Memory object.
The memory_level determines the level above which the wrapped
function output is cached. By specifying a numeric value for
this level, the user can to control the amount of cache memory
used. This function will cache the function call or not
depending on the cache level.
Parameters
----------
func: function
The function the output of which is to be cached.
memory_level: int
The memory_level from which caching must be enabled for the wrapped
function.
Returns
-------
mem: joblib.Memory
object that wraps the function func. This object may be
a no-op, if the requested level is lower than the value given
to _cache()). For consistency, a joblib.Memory object is always
returned.
]
variable[verbose] assign[=] call[name[getattr], parameter[name[self], constant[verbose], constant[0]]]
if <ast.UnaryOp object at 0x7da1b008fc40> begin[:]
name[self].memory_level assign[=] constant[0]
if <ast.UnaryOp object at 0x7da1b008d060> begin[:]
name[self].memory assign[=] call[name[Memory], parameter[]]
if call[name[isinstance], parameter[name[self].memory, name[_basestring]]] begin[:]
name[self].memory assign[=] call[name[Memory], parameter[]]
if compare[name[self].memory_level equal[==] constant[0]] begin[:]
if <ast.BoolOp object at 0x7da1b004fc40> begin[:]
call[name[warnings].warn, parameter[constant[memory_level is currently set to 0 but a Memory object has been provided. Setting memory_level to 1.]]]
name[self].memory_level assign[=] constant[1]
return[call[name[cache], parameter[name[func], name[self].memory]]]
|
keyword[def] identifier[_cache] ( identifier[self] , identifier[func] , identifier[func_memory_level] = literal[int] ,** identifier[kwargs] ):
literal[string]
identifier[verbose] = identifier[getattr] ( identifier[self] , literal[string] , literal[int] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[memory_level] = literal[int]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[memory] = identifier[Memory] ( identifier[cachedir] = keyword[None] , identifier[verbose] = identifier[verbose] )
keyword[if] identifier[isinstance] ( identifier[self] . identifier[memory] , identifier[_basestring] ):
identifier[self] . identifier[memory] = identifier[Memory] ( identifier[cachedir] = identifier[self] . identifier[memory] , identifier[verbose] = identifier[verbose] )
keyword[if] identifier[self] . identifier[memory_level] == literal[int] :
keyword[if] ( identifier[isinstance] ( identifier[self] . identifier[memory] , identifier[_basestring] )
keyword[or] identifier[self] . identifier[memory] . identifier[cachedir] keyword[is] keyword[not] keyword[None] ):
identifier[warnings] . identifier[warn] ( literal[string]
literal[string]
literal[string] )
identifier[self] . identifier[memory_level] = literal[int]
keyword[return] identifier[cache] ( identifier[func] , identifier[self] . identifier[memory] , identifier[func_memory_level] = identifier[func_memory_level] ,
identifier[memory_level] = identifier[self] . identifier[memory_level] ,** identifier[kwargs] )
|
def _cache(self, func, func_memory_level=1, **kwargs):
""" Return a joblib.Memory object.
The memory_level determines the level above which the wrapped
function output is cached. By specifying a numeric value for
this level, the user can to control the amount of cache memory
used. This function will cache the function call or not
depending on the cache level.
Parameters
----------
func: function
The function the output of which is to be cached.
memory_level: int
The memory_level from which caching must be enabled for the wrapped
function.
Returns
-------
mem: joblib.Memory
object that wraps the function func. This object may be
a no-op, if the requested level is lower than the value given
to _cache()). For consistency, a joblib.Memory object is always
returned.
"""
verbose = getattr(self, 'verbose', 0)
# Creates attributes if they don't exist
# This is to make creating them in __init__() optional.
if not hasattr(self, 'memory_level'):
self.memory_level = 0 # depends on [control=['if'], data=[]]
if not hasattr(self, 'memory'):
self.memory = Memory(cachedir=None, verbose=verbose) # depends on [control=['if'], data=[]]
if isinstance(self.memory, _basestring):
self.memory = Memory(cachedir=self.memory, verbose=verbose) # depends on [control=['if'], data=[]]
# If cache level is 0 but a memory object has been provided, set
# memory_level to 1 with a warning.
if self.memory_level == 0:
if isinstance(self.memory, _basestring) or self.memory.cachedir is not None:
warnings.warn('memory_level is currently set to 0 but a Memory object has been provided. Setting memory_level to 1.')
self.memory_level = 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return cache(func, self.memory, func_memory_level=func_memory_level, memory_level=self.memory_level, **kwargs)
|
def contains(self, other):
"""
Is the other VariantSequence a subsequence of this one?
The two sequences must agree on the alt nucleotides, the prefix of the
longer must contain the prefix of the shorter, and the suffix of the
longer must contain the suffix of the shorter.
"""
return (self.alt == other.alt and
self.prefix.endswith(other.prefix) and
self.suffix.startswith(other.suffix))
|
def function[contains, parameter[self, other]]:
constant[
Is the other VariantSequence a subsequence of this one?
The two sequences must agree on the alt nucleotides, the prefix of the
longer must contain the prefix of the shorter, and the suffix of the
longer must contain the suffix of the shorter.
]
return[<ast.BoolOp object at 0x7da1b257fa90>]
|
keyword[def] identifier[contains] ( identifier[self] , identifier[other] ):
literal[string]
keyword[return] ( identifier[self] . identifier[alt] == identifier[other] . identifier[alt] keyword[and]
identifier[self] . identifier[prefix] . identifier[endswith] ( identifier[other] . identifier[prefix] ) keyword[and]
identifier[self] . identifier[suffix] . identifier[startswith] ( identifier[other] . identifier[suffix] ))
|
def contains(self, other):
"""
Is the other VariantSequence a subsequence of this one?
The two sequences must agree on the alt nucleotides, the prefix of the
longer must contain the prefix of the shorter, and the suffix of the
longer must contain the suffix of the shorter.
"""
return self.alt == other.alt and self.prefix.endswith(other.prefix) and self.suffix.startswith(other.suffix)
|
def search_people_by_bio(query, limit_results=DEFAULT_LIMIT,
index=['onename_people_index']):
""" queries lucene index to find a nearest match, output is profile username
"""
from pyes import QueryStringQuery, ES
conn = ES()
q = QueryStringQuery(query,
search_fields=['username', 'profile_bio'],
default_operator='and')
results = conn.search(query=q, size=20, indices=index)
count = conn.count(query=q)
count = count.count
# having 'or' gives more results but results quality goes down
if(count == 0):
q = QueryStringQuery(query,
search_fields=['username', 'profile_bio'],
default_operator='or')
results = conn.search(query=q, size=20, indices=index)
results_list = []
counter = 0
for profile in results:
username = profile['username']
results_list.append(username)
counter += 1
if(counter == limit_results):
break
return results_list
|
def function[search_people_by_bio, parameter[query, limit_results, index]]:
constant[ queries lucene index to find a nearest match, output is profile username
]
from relative_module[pyes] import module[QueryStringQuery], module[ES]
variable[conn] assign[=] call[name[ES], parameter[]]
variable[q] assign[=] call[name[QueryStringQuery], parameter[name[query]]]
variable[results] assign[=] call[name[conn].search, parameter[]]
variable[count] assign[=] call[name[conn].count, parameter[]]
variable[count] assign[=] name[count].count
if compare[name[count] equal[==] constant[0]] begin[:]
variable[q] assign[=] call[name[QueryStringQuery], parameter[name[query]]]
variable[results] assign[=] call[name[conn].search, parameter[]]
variable[results_list] assign[=] list[[]]
variable[counter] assign[=] constant[0]
for taget[name[profile]] in starred[name[results]] begin[:]
variable[username] assign[=] call[name[profile]][constant[username]]
call[name[results_list].append, parameter[name[username]]]
<ast.AugAssign object at 0x7da18bccaad0>
if compare[name[counter] equal[==] name[limit_results]] begin[:]
break
return[name[results_list]]
|
keyword[def] identifier[search_people_by_bio] ( identifier[query] , identifier[limit_results] = identifier[DEFAULT_LIMIT] ,
identifier[index] =[ literal[string] ]):
literal[string]
keyword[from] identifier[pyes] keyword[import] identifier[QueryStringQuery] , identifier[ES]
identifier[conn] = identifier[ES] ()
identifier[q] = identifier[QueryStringQuery] ( identifier[query] ,
identifier[search_fields] =[ literal[string] , literal[string] ],
identifier[default_operator] = literal[string] )
identifier[results] = identifier[conn] . identifier[search] ( identifier[query] = identifier[q] , identifier[size] = literal[int] , identifier[indices] = identifier[index] )
identifier[count] = identifier[conn] . identifier[count] ( identifier[query] = identifier[q] )
identifier[count] = identifier[count] . identifier[count]
keyword[if] ( identifier[count] == literal[int] ):
identifier[q] = identifier[QueryStringQuery] ( identifier[query] ,
identifier[search_fields] =[ literal[string] , literal[string] ],
identifier[default_operator] = literal[string] )
identifier[results] = identifier[conn] . identifier[search] ( identifier[query] = identifier[q] , identifier[size] = literal[int] , identifier[indices] = identifier[index] )
identifier[results_list] =[]
identifier[counter] = literal[int]
keyword[for] identifier[profile] keyword[in] identifier[results] :
identifier[username] = identifier[profile] [ literal[string] ]
identifier[results_list] . identifier[append] ( identifier[username] )
identifier[counter] += literal[int]
keyword[if] ( identifier[counter] == identifier[limit_results] ):
keyword[break]
keyword[return] identifier[results_list]
|
def search_people_by_bio(query, limit_results=DEFAULT_LIMIT, index=['onename_people_index']):
""" queries lucene index to find a nearest match, output is profile username
"""
from pyes import QueryStringQuery, ES
conn = ES()
q = QueryStringQuery(query, search_fields=['username', 'profile_bio'], default_operator='and')
results = conn.search(query=q, size=20, indices=index)
count = conn.count(query=q)
count = count.count
# having 'or' gives more results but results quality goes down
if count == 0:
q = QueryStringQuery(query, search_fields=['username', 'profile_bio'], default_operator='or')
results = conn.search(query=q, size=20, indices=index) # depends on [control=['if'], data=[]]
results_list = []
counter = 0
for profile in results:
username = profile['username']
results_list.append(username)
counter += 1
if counter == limit_results:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['profile']]
return results_list
|
def skeleton_to_pdag(skel, separating_sets):
"""Orients the edges of a graph skeleton based on information from
`separating_sets` to form a DAG pattern (DAG).
Parameters
----------
skel: UndirectedGraph
An undirected graph skeleton as e.g. produced by the
estimate_skeleton method.
separating_sets: dict
A dict containing for each pair of not directly connected nodes a
separating set ("witnessing set") of variables that makes then
conditionally independent. (needed for edge orientation)
Returns
-------
pdag: DAG
An estimate for the DAG pattern of the BN underlying the data. The
graph might contain some nodes with both-way edges (X->Y and Y->X).
Any completion by (removing one of the both-way edges for each such
pair) results in a I-equivalent Bayesian network DAG.
Reference
---------
Neapolitan, Learning Bayesian Networks, Section 10.1.2, Algorithm 10.2 (page 550)
http://www.cs.technion.ac.il/~dang/books/Learning%20Bayesian%20Networks(Neapolitan,%20Richard).pdf
Examples
--------
>>> import pandas as pd
>>> import numpy as np
>>> from pgmpy.estimators import ConstraintBasedEstimator
>>> data = pd.DataFrame(np.random.randint(0, 4, size=(5000, 3)), columns=list('ABD'))
>>> data['C'] = data['A'] - data['B']
>>> data['D'] += data['A']
>>> c = ConstraintBasedEstimator(data)
>>> pdag = c.skeleton_to_pdag(*c.estimate_skeleton())
>>> pdag.edges() # edges: A->C, B->C, A--D (not directed)
[('B', 'C'), ('A', 'C'), ('A', 'D'), ('D', 'A')]
"""
pdag = skel.to_directed()
node_pairs = combinations(pdag.nodes(), 2)
# 1) for each X-Z-Y, if Z not in the separating set of X,Y, then orient edges as X->Z<-Y
# (Algorithm 3.4 in Koller & Friedman PGM, page 86)
for X, Y in node_pairs:
if not skel.has_edge(X, Y):
for Z in set(skel.neighbors(X)) & set(skel.neighbors(Y)):
if Z not in separating_sets[frozenset((X, Y))]:
pdag.remove_edges_from([(Z, X), (Z, Y)])
progress = True
while progress: # as long as edges can be oriented (removed)
num_edges = pdag.number_of_edges()
# 2) for each X->Z-Y, orient edges to Z->Y
for X, Y in node_pairs:
for Z in ((set(pdag.successors(X)) - set(pdag.predecessors(X))) &
(set(pdag.successors(Y)) & set(pdag.predecessors(Y)))):
pdag.remove(Y, Z)
# 3) for each X-Y with a directed path from X to Y, orient edges to X->Y
for X, Y in node_pairs:
for path in nx.all_simple_paths(pdag, X, Y):
is_directed = True
for src, dst in path:
if pdag.has_edge(dst, src):
is_directed = False
if is_directed:
pdag.remove(Y, X)
break
# 4) for each X-Z-Y with X->W, Y->W, and Z-W, orient edges to Z->W
for X, Y in node_pairs:
for Z in (set(pdag.successors(X)) & set(pdag.predecessors(X)) &
set(pdag.successors(Y)) & set(pdag.predecessors(Y))):
for W in ((set(pdag.successors(X)) - set(pdag.predecessors(X))) &
(set(pdag.successors(Y)) - set(pdag.predecessors(Y))) &
(set(pdag.successors(Z)) & set(pdag.predecessors(Z)))):
pdag.remove(W, Z)
progress = num_edges > pdag.number_of_edges()
return pdag
|
def function[skeleton_to_pdag, parameter[skel, separating_sets]]:
constant[Orients the edges of a graph skeleton based on information from
`separating_sets` to form a DAG pattern (DAG).
Parameters
----------
skel: UndirectedGraph
An undirected graph skeleton as e.g. produced by the
estimate_skeleton method.
separating_sets: dict
A dict containing for each pair of not directly connected nodes a
separating set ("witnessing set") of variables that makes then
conditionally independent. (needed for edge orientation)
Returns
-------
pdag: DAG
An estimate for the DAG pattern of the BN underlying the data. The
graph might contain some nodes with both-way edges (X->Y and Y->X).
Any completion by (removing one of the both-way edges for each such
pair) results in a I-equivalent Bayesian network DAG.
Reference
---------
Neapolitan, Learning Bayesian Networks, Section 10.1.2, Algorithm 10.2 (page 550)
http://www.cs.technion.ac.il/~dang/books/Learning%20Bayesian%20Networks(Neapolitan,%20Richard).pdf
Examples
--------
>>> import pandas as pd
>>> import numpy as np
>>> from pgmpy.estimators import ConstraintBasedEstimator
>>> data = pd.DataFrame(np.random.randint(0, 4, size=(5000, 3)), columns=list('ABD'))
>>> data['C'] = data['A'] - data['B']
>>> data['D'] += data['A']
>>> c = ConstraintBasedEstimator(data)
>>> pdag = c.skeleton_to_pdag(*c.estimate_skeleton())
>>> pdag.edges() # edges: A->C, B->C, A--D (not directed)
[('B', 'C'), ('A', 'C'), ('A', 'D'), ('D', 'A')]
]
variable[pdag] assign[=] call[name[skel].to_directed, parameter[]]
variable[node_pairs] assign[=] call[name[combinations], parameter[call[name[pdag].nodes, parameter[]], constant[2]]]
for taget[tuple[[<ast.Name object at 0x7da18ede7670>, <ast.Name object at 0x7da18ede56c0>]]] in starred[name[node_pairs]] begin[:]
if <ast.UnaryOp object at 0x7da18ede5b40> begin[:]
for taget[name[Z]] in starred[binary_operation[call[name[set], parameter[call[name[skel].neighbors, parameter[name[X]]]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[call[name[skel].neighbors, parameter[name[Y]]]]]]] begin[:]
if compare[name[Z] <ast.NotIn object at 0x7da2590d7190> call[name[separating_sets]][call[name[frozenset], parameter[tuple[[<ast.Name object at 0x7da18ede52d0>, <ast.Name object at 0x7da18ede7c70>]]]]]] begin[:]
call[name[pdag].remove_edges_from, parameter[list[[<ast.Tuple object at 0x7da1b26af850>, <ast.Tuple object at 0x7da1b26ae950>]]]]
variable[progress] assign[=] constant[True]
while name[progress] begin[:]
variable[num_edges] assign[=] call[name[pdag].number_of_edges, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b26acee0>, <ast.Name object at 0x7da1b26af040>]]] in starred[name[node_pairs]] begin[:]
for taget[name[Z]] in starred[binary_operation[binary_operation[call[name[set], parameter[call[name[pdag].successors, parameter[name[X]]]]] - call[name[set], parameter[call[name[pdag].predecessors, parameter[name[X]]]]]] <ast.BitAnd object at 0x7da2590d6b60> binary_operation[call[name[set], parameter[call[name[pdag].successors, parameter[name[Y]]]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[call[name[pdag].predecessors, parameter[name[Y]]]]]]]] begin[:]
call[name[pdag].remove, parameter[name[Y], name[Z]]]
for taget[tuple[[<ast.Name object at 0x7da1b26ae230>, <ast.Name object at 0x7da1b26ae290>]]] in starred[name[node_pairs]] begin[:]
for taget[name[path]] in starred[call[name[nx].all_simple_paths, parameter[name[pdag], name[X], name[Y]]]] begin[:]
variable[is_directed] assign[=] constant[True]
for taget[tuple[[<ast.Name object at 0x7da1b26ac820>, <ast.Name object at 0x7da1b26ac7f0>]]] in starred[name[path]] begin[:]
if call[name[pdag].has_edge, parameter[name[dst], name[src]]] begin[:]
variable[is_directed] assign[=] constant[False]
if name[is_directed] begin[:]
call[name[pdag].remove, parameter[name[Y], name[X]]]
break
for taget[tuple[[<ast.Name object at 0x7da1b26ac4c0>, <ast.Name object at 0x7da1b26ad990>]]] in starred[name[node_pairs]] begin[:]
for taget[name[Z]] in starred[binary_operation[binary_operation[binary_operation[call[name[set], parameter[call[name[pdag].successors, parameter[name[X]]]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[call[name[pdag].predecessors, parameter[name[X]]]]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[call[name[pdag].successors, parameter[name[Y]]]]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[call[name[pdag].predecessors, parameter[name[Y]]]]]]] begin[:]
for taget[name[W]] in starred[binary_operation[binary_operation[binary_operation[call[name[set], parameter[call[name[pdag].successors, parameter[name[X]]]]] - call[name[set], parameter[call[name[pdag].predecessors, parameter[name[X]]]]]] <ast.BitAnd object at 0x7da2590d6b60> binary_operation[call[name[set], parameter[call[name[pdag].successors, parameter[name[Y]]]]] - call[name[set], parameter[call[name[pdag].predecessors, parameter[name[Y]]]]]]] <ast.BitAnd object at 0x7da2590d6b60> binary_operation[call[name[set], parameter[call[name[pdag].successors, parameter[name[Z]]]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[call[name[pdag].predecessors, parameter[name[Z]]]]]]]] begin[:]
call[name[pdag].remove, parameter[name[W], name[Z]]]
variable[progress] assign[=] compare[name[num_edges] greater[>] call[name[pdag].number_of_edges, parameter[]]]
return[name[pdag]]
|
keyword[def] identifier[skeleton_to_pdag] ( identifier[skel] , identifier[separating_sets] ):
literal[string]
identifier[pdag] = identifier[skel] . identifier[to_directed] ()
identifier[node_pairs] = identifier[combinations] ( identifier[pdag] . identifier[nodes] (), literal[int] )
keyword[for] identifier[X] , identifier[Y] keyword[in] identifier[node_pairs] :
keyword[if] keyword[not] identifier[skel] . identifier[has_edge] ( identifier[X] , identifier[Y] ):
keyword[for] identifier[Z] keyword[in] identifier[set] ( identifier[skel] . identifier[neighbors] ( identifier[X] ))& identifier[set] ( identifier[skel] . identifier[neighbors] ( identifier[Y] )):
keyword[if] identifier[Z] keyword[not] keyword[in] identifier[separating_sets] [ identifier[frozenset] (( identifier[X] , identifier[Y] ))]:
identifier[pdag] . identifier[remove_edges_from] ([( identifier[Z] , identifier[X] ),( identifier[Z] , identifier[Y] )])
identifier[progress] = keyword[True]
keyword[while] identifier[progress] :
identifier[num_edges] = identifier[pdag] . identifier[number_of_edges] ()
keyword[for] identifier[X] , identifier[Y] keyword[in] identifier[node_pairs] :
keyword[for] identifier[Z] keyword[in] (( identifier[set] ( identifier[pdag] . identifier[successors] ( identifier[X] ))- identifier[set] ( identifier[pdag] . identifier[predecessors] ( identifier[X] )))&
( identifier[set] ( identifier[pdag] . identifier[successors] ( identifier[Y] ))& identifier[set] ( identifier[pdag] . identifier[predecessors] ( identifier[Y] )))):
identifier[pdag] . identifier[remove] ( identifier[Y] , identifier[Z] )
keyword[for] identifier[X] , identifier[Y] keyword[in] identifier[node_pairs] :
keyword[for] identifier[path] keyword[in] identifier[nx] . identifier[all_simple_paths] ( identifier[pdag] , identifier[X] , identifier[Y] ):
identifier[is_directed] = keyword[True]
keyword[for] identifier[src] , identifier[dst] keyword[in] identifier[path] :
keyword[if] identifier[pdag] . identifier[has_edge] ( identifier[dst] , identifier[src] ):
identifier[is_directed] = keyword[False]
keyword[if] identifier[is_directed] :
identifier[pdag] . identifier[remove] ( identifier[Y] , identifier[X] )
keyword[break]
keyword[for] identifier[X] , identifier[Y] keyword[in] identifier[node_pairs] :
keyword[for] identifier[Z] keyword[in] ( identifier[set] ( identifier[pdag] . identifier[successors] ( identifier[X] ))& identifier[set] ( identifier[pdag] . identifier[predecessors] ( identifier[X] ))&
identifier[set] ( identifier[pdag] . identifier[successors] ( identifier[Y] ))& identifier[set] ( identifier[pdag] . identifier[predecessors] ( identifier[Y] ))):
keyword[for] identifier[W] keyword[in] (( identifier[set] ( identifier[pdag] . identifier[successors] ( identifier[X] ))- identifier[set] ( identifier[pdag] . identifier[predecessors] ( identifier[X] )))&
( identifier[set] ( identifier[pdag] . identifier[successors] ( identifier[Y] ))- identifier[set] ( identifier[pdag] . identifier[predecessors] ( identifier[Y] )))&
( identifier[set] ( identifier[pdag] . identifier[successors] ( identifier[Z] ))& identifier[set] ( identifier[pdag] . identifier[predecessors] ( identifier[Z] )))):
identifier[pdag] . identifier[remove] ( identifier[W] , identifier[Z] )
identifier[progress] = identifier[num_edges] > identifier[pdag] . identifier[number_of_edges] ()
keyword[return] identifier[pdag]
|
def skeleton_to_pdag(skel, separating_sets):
"""Orients the edges of a graph skeleton based on information from
`separating_sets` to form a DAG pattern (DAG).
Parameters
----------
skel: UndirectedGraph
An undirected graph skeleton as e.g. produced by the
estimate_skeleton method.
separating_sets: dict
A dict containing for each pair of not directly connected nodes a
separating set ("witnessing set") of variables that makes then
conditionally independent. (needed for edge orientation)
Returns
-------
pdag: DAG
An estimate for the DAG pattern of the BN underlying the data. The
graph might contain some nodes with both-way edges (X->Y and Y->X).
Any completion by (removing one of the both-way edges for each such
pair) results in a I-equivalent Bayesian network DAG.
Reference
---------
Neapolitan, Learning Bayesian Networks, Section 10.1.2, Algorithm 10.2 (page 550)
http://www.cs.technion.ac.il/~dang/books/Learning%20Bayesian%20Networks(Neapolitan,%20Richard).pdf
Examples
--------
>>> import pandas as pd
>>> import numpy as np
>>> from pgmpy.estimators import ConstraintBasedEstimator
>>> data = pd.DataFrame(np.random.randint(0, 4, size=(5000, 3)), columns=list('ABD'))
>>> data['C'] = data['A'] - data['B']
>>> data['D'] += data['A']
>>> c = ConstraintBasedEstimator(data)
>>> pdag = c.skeleton_to_pdag(*c.estimate_skeleton())
>>> pdag.edges() # edges: A->C, B->C, A--D (not directed)
[('B', 'C'), ('A', 'C'), ('A', 'D'), ('D', 'A')]
"""
pdag = skel.to_directed()
node_pairs = combinations(pdag.nodes(), 2)
# 1) for each X-Z-Y, if Z not in the separating set of X,Y, then orient edges as X->Z<-Y
# (Algorithm 3.4 in Koller & Friedman PGM, page 86)
for (X, Y) in node_pairs:
if not skel.has_edge(X, Y):
for Z in set(skel.neighbors(X)) & set(skel.neighbors(Y)):
if Z not in separating_sets[frozenset((X, Y))]:
pdag.remove_edges_from([(Z, X), (Z, Y)]) # depends on [control=['if'], data=['Z']] # depends on [control=['for'], data=['Z']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
progress = True
while progress: # as long as edges can be oriented (removed)
num_edges = pdag.number_of_edges()
# 2) for each X->Z-Y, orient edges to Z->Y
for (X, Y) in node_pairs:
for Z in set(pdag.successors(X)) - set(pdag.predecessors(X)) & (set(pdag.successors(Y)) & set(pdag.predecessors(Y))):
pdag.remove(Y, Z) # depends on [control=['for'], data=['Z']] # depends on [control=['for'], data=[]]
# 3) for each X-Y with a directed path from X to Y, orient edges to X->Y
for (X, Y) in node_pairs:
for path in nx.all_simple_paths(pdag, X, Y):
is_directed = True
for (src, dst) in path:
if pdag.has_edge(dst, src):
is_directed = False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if is_directed:
pdag.remove(Y, X)
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['path']] # depends on [control=['for'], data=[]]
# 4) for each X-Z-Y with X->W, Y->W, and Z-W, orient edges to Z->W
for (X, Y) in node_pairs:
for Z in set(pdag.successors(X)) & set(pdag.predecessors(X)) & set(pdag.successors(Y)) & set(pdag.predecessors(Y)):
for W in set(pdag.successors(X)) - set(pdag.predecessors(X)) & set(pdag.successors(Y)) - set(pdag.predecessors(Y)) & (set(pdag.successors(Z)) & set(pdag.predecessors(Z))):
pdag.remove(W, Z) # depends on [control=['for'], data=['W']] # depends on [control=['for'], data=['Z']] # depends on [control=['for'], data=[]]
progress = num_edges > pdag.number_of_edges() # depends on [control=['while'], data=[]]
return pdag
|
def update(self, **kwargs):
"""Update a resource by passing in modifications via keyword arguments.
"""
data = self._generate_input_dict(**kwargs)
url = self.parent.url + '/relationship'
self.load(self.client.put(url, data=data))
return self
|
def function[update, parameter[self]]:
constant[Update a resource by passing in modifications via keyword arguments.
]
variable[data] assign[=] call[name[self]._generate_input_dict, parameter[]]
variable[url] assign[=] binary_operation[name[self].parent.url + constant[/relationship]]
call[name[self].load, parameter[call[name[self].client.put, parameter[name[url]]]]]
return[name[self]]
|
keyword[def] identifier[update] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[data] = identifier[self] . identifier[_generate_input_dict] (** identifier[kwargs] )
identifier[url] = identifier[self] . identifier[parent] . identifier[url] + literal[string]
identifier[self] . identifier[load] ( identifier[self] . identifier[client] . identifier[put] ( identifier[url] , identifier[data] = identifier[data] ))
keyword[return] identifier[self]
|
def update(self, **kwargs):
"""Update a resource by passing in modifications via keyword arguments.
"""
data = self._generate_input_dict(**kwargs)
url = self.parent.url + '/relationship'
self.load(self.client.put(url, data=data))
return self
|
def fw_version(self):
"""
Returns the firmware version of the sensor if available. Currently only
I2C/NXT sensors support this.
"""
(self._fw_version, value) = self.get_cached_attr_string(self._fw_version, 'fw_version')
return value
|
def function[fw_version, parameter[self]]:
constant[
Returns the firmware version of the sensor if available. Currently only
I2C/NXT sensors support this.
]
<ast.Tuple object at 0x7da1b1727730> assign[=] call[name[self].get_cached_attr_string, parameter[name[self]._fw_version, constant[fw_version]]]
return[name[value]]
|
keyword[def] identifier[fw_version] ( identifier[self] ):
literal[string]
( identifier[self] . identifier[_fw_version] , identifier[value] )= identifier[self] . identifier[get_cached_attr_string] ( identifier[self] . identifier[_fw_version] , literal[string] )
keyword[return] identifier[value]
|
def fw_version(self):
"""
Returns the firmware version of the sensor if available. Currently only
I2C/NXT sensors support this.
"""
(self._fw_version, value) = self.get_cached_attr_string(self._fw_version, 'fw_version')
return value
|
def generate_sections(self):
"""Return dictionary of source section slugs, name, and counts."""
sources = Dataset.objects.values(
'source', 'source_slug'
).annotate(source_count=Count('source_slug'))
return sorted([
{
'slug': source['source_slug'],
'name': source['source'],
'count': source['source_count']
}
for source in sources
], key=lambda k: k['count'], reverse=True)
|
def function[generate_sections, parameter[self]]:
constant[Return dictionary of source section slugs, name, and counts.]
variable[sources] assign[=] call[call[name[Dataset].objects.values, parameter[constant[source], constant[source_slug]]].annotate, parameter[]]
return[call[name[sorted], parameter[<ast.ListComp object at 0x7da18eb566b0>]]]
|
keyword[def] identifier[generate_sections] ( identifier[self] ):
literal[string]
identifier[sources] = identifier[Dataset] . identifier[objects] . identifier[values] (
literal[string] , literal[string]
). identifier[annotate] ( identifier[source_count] = identifier[Count] ( literal[string] ))
keyword[return] identifier[sorted] ([
{
literal[string] : identifier[source] [ literal[string] ],
literal[string] : identifier[source] [ literal[string] ],
literal[string] : identifier[source] [ literal[string] ]
}
keyword[for] identifier[source] keyword[in] identifier[sources]
], identifier[key] = keyword[lambda] identifier[k] : identifier[k] [ literal[string] ], identifier[reverse] = keyword[True] )
|
def generate_sections(self):
"""Return dictionary of source section slugs, name, and counts."""
sources = Dataset.objects.values('source', 'source_slug').annotate(source_count=Count('source_slug'))
return sorted([{'slug': source['source_slug'], 'name': source['source'], 'count': source['source_count']} for source in sources], key=lambda k: k['count'], reverse=True)
|
def walk(self, function, raise_errors=True,
call_on_sections=False, **keywargs):
"""
Walk every member and call a function on the keyword and value.
Return a dictionary of the return values
If the function raises an exception, raise the errror
unless ``raise_errors=False``, in which case set the return value to
``False``.
Any unrecognised keyword arguments you pass to walk, will be pased on
to the function you pass in.
Note: if ``call_on_sections`` is ``True`` then - on encountering a
subsection, *first* the function is called for the *whole* subsection,
and then recurses into it's members. This means your function must be
able to handle strings, dictionaries and lists. This allows you
to change the key of subsections as well as for ordinary members. The
return value when called on the whole subsection has to be discarded.
See the encode and decode methods for examples, including functions.
admonition:: caution
You can use ``walk`` to transform the names of members of a section
but you mustn't add or delete members.
>>> config = '''[XXXXsection]
... XXXXkey = XXXXvalue'''.splitlines()
>>> cfg = ConfigObj(config)
>>> cfg
ConfigObj({'XXXXsection': {'XXXXkey': 'XXXXvalue'}})
>>> def transform(section, key):
... val = section[key]
... newkey = key.replace('XXXX', 'CLIENT1')
... section.rename(key, newkey)
... if isinstance(val, (tuple, list, dict)):
... pass
... else:
... val = val.replace('XXXX', 'CLIENT1')
... section[newkey] = val
>>> cfg.walk(transform, call_on_sections=True)
{'CLIENT1section': {'CLIENT1key': None}}
>>> cfg
ConfigObj({'CLIENT1section': {'CLIENT1key': 'CLIENT1value'}})
"""
out = {}
# scalars first
for i in range(len(self.scalars)):
entry = self.scalars[i]
try:
val = function(self, entry, **keywargs)
# bound again in case name has changed
entry = self.scalars[i]
out[entry] = val
except Exception:
if raise_errors:
raise
else:
entry = self.scalars[i]
out[entry] = False
# then sections
for i in range(len(self.sections)):
entry = self.sections[i]
if call_on_sections:
try:
function(self, entry, **keywargs)
except Exception:
if raise_errors:
raise
else:
entry = self.sections[i]
out[entry] = False
# bound again in case name has changed
entry = self.sections[i]
# previous result is discarded
out[entry] = self[entry].walk(
function,
raise_errors=raise_errors,
call_on_sections=call_on_sections,
**keywargs)
return out
|
def function[walk, parameter[self, function, raise_errors, call_on_sections]]:
constant[
Walk every member and call a function on the keyword and value.
Return a dictionary of the return values
If the function raises an exception, raise the errror
unless ``raise_errors=False``, in which case set the return value to
``False``.
Any unrecognised keyword arguments you pass to walk, will be pased on
to the function you pass in.
Note: if ``call_on_sections`` is ``True`` then - on encountering a
subsection, *first* the function is called for the *whole* subsection,
and then recurses into it's members. This means your function must be
able to handle strings, dictionaries and lists. This allows you
to change the key of subsections as well as for ordinary members. The
return value when called on the whole subsection has to be discarded.
See the encode and decode methods for examples, including functions.
admonition:: caution
You can use ``walk`` to transform the names of members of a section
but you mustn't add or delete members.
>>> config = '''[XXXXsection]
... XXXXkey = XXXXvalue'''.splitlines()
>>> cfg = ConfigObj(config)
>>> cfg
ConfigObj({'XXXXsection': {'XXXXkey': 'XXXXvalue'}})
>>> def transform(section, key):
... val = section[key]
... newkey = key.replace('XXXX', 'CLIENT1')
... section.rename(key, newkey)
... if isinstance(val, (tuple, list, dict)):
... pass
... else:
... val = val.replace('XXXX', 'CLIENT1')
... section[newkey] = val
>>> cfg.walk(transform, call_on_sections=True)
{'CLIENT1section': {'CLIENT1key': None}}
>>> cfg
ConfigObj({'CLIENT1section': {'CLIENT1key': 'CLIENT1value'}})
]
variable[out] assign[=] dictionary[[], []]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].scalars]]]]] begin[:]
variable[entry] assign[=] call[name[self].scalars][name[i]]
<ast.Try object at 0x7da1b0e63250>
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].sections]]]]] begin[:]
variable[entry] assign[=] call[name[self].sections][name[i]]
if name[call_on_sections] begin[:]
<ast.Try object at 0x7da1b0e61b10>
variable[entry] assign[=] call[name[self].sections][name[i]]
call[name[out]][name[entry]] assign[=] call[call[name[self]][name[entry]].walk, parameter[name[function]]]
return[name[out]]
|
keyword[def] identifier[walk] ( identifier[self] , identifier[function] , identifier[raise_errors] = keyword[True] ,
identifier[call_on_sections] = keyword[False] ,** identifier[keywargs] ):
literal[string]
identifier[out] ={}
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[scalars] )):
identifier[entry] = identifier[self] . identifier[scalars] [ identifier[i] ]
keyword[try] :
identifier[val] = identifier[function] ( identifier[self] , identifier[entry] ,** identifier[keywargs] )
identifier[entry] = identifier[self] . identifier[scalars] [ identifier[i] ]
identifier[out] [ identifier[entry] ]= identifier[val]
keyword[except] identifier[Exception] :
keyword[if] identifier[raise_errors] :
keyword[raise]
keyword[else] :
identifier[entry] = identifier[self] . identifier[scalars] [ identifier[i] ]
identifier[out] [ identifier[entry] ]= keyword[False]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[sections] )):
identifier[entry] = identifier[self] . identifier[sections] [ identifier[i] ]
keyword[if] identifier[call_on_sections] :
keyword[try] :
identifier[function] ( identifier[self] , identifier[entry] ,** identifier[keywargs] )
keyword[except] identifier[Exception] :
keyword[if] identifier[raise_errors] :
keyword[raise]
keyword[else] :
identifier[entry] = identifier[self] . identifier[sections] [ identifier[i] ]
identifier[out] [ identifier[entry] ]= keyword[False]
identifier[entry] = identifier[self] . identifier[sections] [ identifier[i] ]
identifier[out] [ identifier[entry] ]= identifier[self] [ identifier[entry] ]. identifier[walk] (
identifier[function] ,
identifier[raise_errors] = identifier[raise_errors] ,
identifier[call_on_sections] = identifier[call_on_sections] ,
** identifier[keywargs] )
keyword[return] identifier[out]
|
def walk(self, function, raise_errors=True, call_on_sections=False, **keywargs):
"""
Walk every member and call a function on the keyword and value.
Return a dictionary of the return values
If the function raises an exception, raise the errror
unless ``raise_errors=False``, in which case set the return value to
``False``.
Any unrecognised keyword arguments you pass to walk, will be pased on
to the function you pass in.
Note: if ``call_on_sections`` is ``True`` then - on encountering a
subsection, *first* the function is called for the *whole* subsection,
and then recurses into it's members. This means your function must be
able to handle strings, dictionaries and lists. This allows you
to change the key of subsections as well as for ordinary members. The
return value when called on the whole subsection has to be discarded.
See the encode and decode methods for examples, including functions.
admonition:: caution
You can use ``walk`` to transform the names of members of a section
but you mustn't add or delete members.
>>> config = '''[XXXXsection]
... XXXXkey = XXXXvalue'''.splitlines()
>>> cfg = ConfigObj(config)
>>> cfg
ConfigObj({'XXXXsection': {'XXXXkey': 'XXXXvalue'}})
>>> def transform(section, key):
... val = section[key]
... newkey = key.replace('XXXX', 'CLIENT1')
... section.rename(key, newkey)
... if isinstance(val, (tuple, list, dict)):
... pass
... else:
... val = val.replace('XXXX', 'CLIENT1')
... section[newkey] = val
>>> cfg.walk(transform, call_on_sections=True)
{'CLIENT1section': {'CLIENT1key': None}}
>>> cfg
ConfigObj({'CLIENT1section': {'CLIENT1key': 'CLIENT1value'}})
"""
out = {}
# scalars first
for i in range(len(self.scalars)):
entry = self.scalars[i]
try:
val = function(self, entry, **keywargs)
# bound again in case name has changed
entry = self.scalars[i]
out[entry] = val # depends on [control=['try'], data=[]]
except Exception:
if raise_errors:
raise # depends on [control=['if'], data=[]]
else:
entry = self.scalars[i]
out[entry] = False # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['i']]
# then sections
for i in range(len(self.sections)):
entry = self.sections[i]
if call_on_sections:
try:
function(self, entry, **keywargs) # depends on [control=['try'], data=[]]
except Exception:
if raise_errors:
raise # depends on [control=['if'], data=[]]
else:
entry = self.sections[i]
out[entry] = False # depends on [control=['except'], data=[]]
# bound again in case name has changed
entry = self.sections[i] # depends on [control=['if'], data=[]]
# previous result is discarded
out[entry] = self[entry].walk(function, raise_errors=raise_errors, call_on_sections=call_on_sections, **keywargs) # depends on [control=['for'], data=['i']]
return out
|
def render_revalidation_failure(self, failed_step, form, **kwargs):
"""
When a step fails, we have to redirect the user to the first failing
step.
"""
self.storage.current_step = failed_step
return redirect(self.url_name, step=failed_step)
|
def function[render_revalidation_failure, parameter[self, failed_step, form]]:
constant[
When a step fails, we have to redirect the user to the first failing
step.
]
name[self].storage.current_step assign[=] name[failed_step]
return[call[name[redirect], parameter[name[self].url_name]]]
|
keyword[def] identifier[render_revalidation_failure] ( identifier[self] , identifier[failed_step] , identifier[form] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[storage] . identifier[current_step] = identifier[failed_step]
keyword[return] identifier[redirect] ( identifier[self] . identifier[url_name] , identifier[step] = identifier[failed_step] )
|
def render_revalidation_failure(self, failed_step, form, **kwargs):
"""
When a step fails, we have to redirect the user to the first failing
step.
"""
self.storage.current_step = failed_step
return redirect(self.url_name, step=failed_step)
|
def end_index(self):
"""Return the 1-based index of the last item on this page."""
paginator = self.paginator
# Special case for the last page because there can be orphans.
if self.number == paginator.num_pages:
return paginator.count
return (self.number - 1) * paginator.per_page + paginator.first_page
|
def function[end_index, parameter[self]]:
constant[Return the 1-based index of the last item on this page.]
variable[paginator] assign[=] name[self].paginator
if compare[name[self].number equal[==] name[paginator].num_pages] begin[:]
return[name[paginator].count]
return[binary_operation[binary_operation[binary_operation[name[self].number - constant[1]] * name[paginator].per_page] + name[paginator].first_page]]
|
keyword[def] identifier[end_index] ( identifier[self] ):
literal[string]
identifier[paginator] = identifier[self] . identifier[paginator]
keyword[if] identifier[self] . identifier[number] == identifier[paginator] . identifier[num_pages] :
keyword[return] identifier[paginator] . identifier[count]
keyword[return] ( identifier[self] . identifier[number] - literal[int] )* identifier[paginator] . identifier[per_page] + identifier[paginator] . identifier[first_page]
|
def end_index(self):
"""Return the 1-based index of the last item on this page."""
paginator = self.paginator
# Special case for the last page because there can be orphans.
if self.number == paginator.num_pages:
return paginator.count # depends on [control=['if'], data=[]]
return (self.number - 1) * paginator.per_page + paginator.first_page
|
def set_rgb_dim_level_with_time(
self,
channelIndex: int,
rgb: RGBColorState,
dimLevel: float,
onTime: float,
rampTime: float,
):
""" sets the color and dimlevel of the lamp
Args:
channelIndex(int): the channelIndex of the lamp. Use self.topLightChannelIndex or self.bottomLightChannelIndex
rgb(RGBColorState): the color of the lamp
dimLevel(float): the dimLevel of the lamp. 0.0 = off, 1.0 = MAX
onTime(float):
rampTime(float):
Returns:
the result of the _restCall
"""
data = {
"channelIndex": channelIndex,
"deviceId": self.id,
"simpleRGBColorState": rgb,
"dimLevel": dimLevel,
"onTime": onTime,
"rampTime": rampTime,
}
return self._restCall(
"device/control/setSimpleRGBColorDimLevelWithTime", body=json.dumps(data)
)
|
def function[set_rgb_dim_level_with_time, parameter[self, channelIndex, rgb, dimLevel, onTime, rampTime]]:
constant[ sets the color and dimlevel of the lamp
Args:
channelIndex(int): the channelIndex of the lamp. Use self.topLightChannelIndex or self.bottomLightChannelIndex
rgb(RGBColorState): the color of the lamp
dimLevel(float): the dimLevel of the lamp. 0.0 = off, 1.0 = MAX
onTime(float):
rampTime(float):
Returns:
the result of the _restCall
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b060a3e0>, <ast.Constant object at 0x7da1b060a8c0>, <ast.Constant object at 0x7da1b060a1d0>, <ast.Constant object at 0x7da1b06084f0>, <ast.Constant object at 0x7da1b0608fa0>, <ast.Constant object at 0x7da1b060ad70>], [<ast.Name object at 0x7da1b060a9b0>, <ast.Attribute object at 0x7da1b060a9e0>, <ast.Name object at 0x7da1b0608790>, <ast.Name object at 0x7da207f9b550>, <ast.Name object at 0x7da207f9a440>, <ast.Name object at 0x7da18ede53f0>]]
return[call[name[self]._restCall, parameter[constant[device/control/setSimpleRGBColorDimLevelWithTime]]]]
|
keyword[def] identifier[set_rgb_dim_level_with_time] (
identifier[self] ,
identifier[channelIndex] : identifier[int] ,
identifier[rgb] : identifier[RGBColorState] ,
identifier[dimLevel] : identifier[float] ,
identifier[onTime] : identifier[float] ,
identifier[rampTime] : identifier[float] ,
):
literal[string]
identifier[data] ={
literal[string] : identifier[channelIndex] ,
literal[string] : identifier[self] . identifier[id] ,
literal[string] : identifier[rgb] ,
literal[string] : identifier[dimLevel] ,
literal[string] : identifier[onTime] ,
literal[string] : identifier[rampTime] ,
}
keyword[return] identifier[self] . identifier[_restCall] (
literal[string] , identifier[body] = identifier[json] . identifier[dumps] ( identifier[data] )
)
|
def set_rgb_dim_level_with_time(self, channelIndex: int, rgb: RGBColorState, dimLevel: float, onTime: float, rampTime: float):
""" sets the color and dimlevel of the lamp
Args:
channelIndex(int): the channelIndex of the lamp. Use self.topLightChannelIndex or self.bottomLightChannelIndex
rgb(RGBColorState): the color of the lamp
dimLevel(float): the dimLevel of the lamp. 0.0 = off, 1.0 = MAX
onTime(float):
rampTime(float):
Returns:
the result of the _restCall
"""
data = {'channelIndex': channelIndex, 'deviceId': self.id, 'simpleRGBColorState': rgb, 'dimLevel': dimLevel, 'onTime': onTime, 'rampTime': rampTime}
return self._restCall('device/control/setSimpleRGBColorDimLevelWithTime', body=json.dumps(data))
|
def createGroup(self, group, vendorSpecific=None):
"""See Also: createGroupResponse()
Args:
group:
vendorSpecific:
Returns:
"""
response = self.createGroupResponse(group, vendorSpecific)
return self._read_boolean_response(response)
|
def function[createGroup, parameter[self, group, vendorSpecific]]:
constant[See Also: createGroupResponse()
Args:
group:
vendorSpecific:
Returns:
]
variable[response] assign[=] call[name[self].createGroupResponse, parameter[name[group], name[vendorSpecific]]]
return[call[name[self]._read_boolean_response, parameter[name[response]]]]
|
keyword[def] identifier[createGroup] ( identifier[self] , identifier[group] , identifier[vendorSpecific] = keyword[None] ):
literal[string]
identifier[response] = identifier[self] . identifier[createGroupResponse] ( identifier[group] , identifier[vendorSpecific] )
keyword[return] identifier[self] . identifier[_read_boolean_response] ( identifier[response] )
|
def createGroup(self, group, vendorSpecific=None):
"""See Also: createGroupResponse()
Args:
group:
vendorSpecific:
Returns:
"""
response = self.createGroupResponse(group, vendorSpecific)
return self._read_boolean_response(response)
|
def merge(self, from_email, source_incidents):
"""Merge other incidents into this incident."""
if from_email is None or not isinstance(from_email, six.string_types):
raise MissingFromEmail(from_email)
add_headers = {'from': from_email, }
endpoint = '/'.join((self.endpoint, self.id, 'merge'))
incident_ids = [entity['id'] if isinstance(entity, Entity) else entity
for entity in source_incidents]
incident_references = [{'type': 'incident_reference', 'id': id_}
for id_ in incident_ids]
return self.__class__.create(
endpoint=endpoint,
api_key=self.api_key,
add_headers=add_headers,
data_key='source_incidents',
data=incident_references,
method='PUT',
)
|
def function[merge, parameter[self, from_email, source_incidents]]:
constant[Merge other incidents into this incident.]
if <ast.BoolOp object at 0x7da1b06fd480> begin[:]
<ast.Raise object at 0x7da1b06ff940>
variable[add_headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b06fc160>], [<ast.Name object at 0x7da1b06fee30>]]
variable[endpoint] assign[=] call[constant[/].join, parameter[tuple[[<ast.Attribute object at 0x7da1b06fc3d0>, <ast.Attribute object at 0x7da1b06ff850>, <ast.Constant object at 0x7da1b06feec0>]]]]
variable[incident_ids] assign[=] <ast.ListComp object at 0x7da1b06fd360>
variable[incident_references] assign[=] <ast.ListComp object at 0x7da1b06fea40>
return[call[name[self].__class__.create, parameter[]]]
|
keyword[def] identifier[merge] ( identifier[self] , identifier[from_email] , identifier[source_incidents] ):
literal[string]
keyword[if] identifier[from_email] keyword[is] keyword[None] keyword[or] keyword[not] identifier[isinstance] ( identifier[from_email] , identifier[six] . identifier[string_types] ):
keyword[raise] identifier[MissingFromEmail] ( identifier[from_email] )
identifier[add_headers] ={ literal[string] : identifier[from_email] ,}
identifier[endpoint] = literal[string] . identifier[join] (( identifier[self] . identifier[endpoint] , identifier[self] . identifier[id] , literal[string] ))
identifier[incident_ids] =[ identifier[entity] [ literal[string] ] keyword[if] identifier[isinstance] ( identifier[entity] , identifier[Entity] ) keyword[else] identifier[entity]
keyword[for] identifier[entity] keyword[in] identifier[source_incidents] ]
identifier[incident_references] =[{ literal[string] : literal[string] , literal[string] : identifier[id_] }
keyword[for] identifier[id_] keyword[in] identifier[incident_ids] ]
keyword[return] identifier[self] . identifier[__class__] . identifier[create] (
identifier[endpoint] = identifier[endpoint] ,
identifier[api_key] = identifier[self] . identifier[api_key] ,
identifier[add_headers] = identifier[add_headers] ,
identifier[data_key] = literal[string] ,
identifier[data] = identifier[incident_references] ,
identifier[method] = literal[string] ,
)
|
def merge(self, from_email, source_incidents):
"""Merge other incidents into this incident."""
if from_email is None or not isinstance(from_email, six.string_types):
raise MissingFromEmail(from_email) # depends on [control=['if'], data=[]]
add_headers = {'from': from_email}
endpoint = '/'.join((self.endpoint, self.id, 'merge'))
incident_ids = [entity['id'] if isinstance(entity, Entity) else entity for entity in source_incidents]
incident_references = [{'type': 'incident_reference', 'id': id_} for id_ in incident_ids]
return self.__class__.create(endpoint=endpoint, api_key=self.api_key, add_headers=add_headers, data_key='source_incidents', data=incident_references, method='PUT')
|
def count_variables_by_type(variables=None):
"""Returns a dict mapping dtypes to number of variables and scalars.
Args:
variables: iterable of `tf.Variable`s, or None. If None is passed, then all
global and local variables in the current graph are used.
Returns:
A dict mapping tf.dtype keys to a dict containing the keys 'num_scalars' and
'num_variables'.
"""
if variables is None:
variables = tf.global_variables() + tf.local_variables()
unique_types = set(v.dtype.base_dtype for v in variables)
results_dict = {}
for dtype in unique_types:
if dtype == tf.string:
tf.logging.warning(
"NB: string Variables present. The memory usage for these Variables "
"will not be accurately computed as it depends on the exact strings "
"stored in a particular session.")
vars_of_type = [v for v in variables if v.dtype.base_dtype == dtype]
num_scalars = sum(v.shape.num_elements() for v in vars_of_type)
results_dict[dtype] = {
"num_variables": len(vars_of_type),
"num_scalars": num_scalars
}
return results_dict
|
def function[count_variables_by_type, parameter[variables]]:
constant[Returns a dict mapping dtypes to number of variables and scalars.
Args:
variables: iterable of `tf.Variable`s, or None. If None is passed, then all
global and local variables in the current graph are used.
Returns:
A dict mapping tf.dtype keys to a dict containing the keys 'num_scalars' and
'num_variables'.
]
if compare[name[variables] is constant[None]] begin[:]
variable[variables] assign[=] binary_operation[call[name[tf].global_variables, parameter[]] + call[name[tf].local_variables, parameter[]]]
variable[unique_types] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da1b1c1a170>]]
variable[results_dict] assign[=] dictionary[[], []]
for taget[name[dtype]] in starred[name[unique_types]] begin[:]
if compare[name[dtype] equal[==] name[tf].string] begin[:]
call[name[tf].logging.warning, parameter[constant[NB: string Variables present. The memory usage for these Variables will not be accurately computed as it depends on the exact strings stored in a particular session.]]]
variable[vars_of_type] assign[=] <ast.ListComp object at 0x7da1b1c1b010>
variable[num_scalars] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b1c180a0>]]
call[name[results_dict]][name[dtype]] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c196f0>, <ast.Constant object at 0x7da1b1c18b80>], [<ast.Call object at 0x7da1b1c1a440>, <ast.Name object at 0x7da1b1c1aad0>]]
return[name[results_dict]]
|
keyword[def] identifier[count_variables_by_type] ( identifier[variables] = keyword[None] ):
literal[string]
keyword[if] identifier[variables] keyword[is] keyword[None] :
identifier[variables] = identifier[tf] . identifier[global_variables] ()+ identifier[tf] . identifier[local_variables] ()
identifier[unique_types] = identifier[set] ( identifier[v] . identifier[dtype] . identifier[base_dtype] keyword[for] identifier[v] keyword[in] identifier[variables] )
identifier[results_dict] ={}
keyword[for] identifier[dtype] keyword[in] identifier[unique_types] :
keyword[if] identifier[dtype] == identifier[tf] . identifier[string] :
identifier[tf] . identifier[logging] . identifier[warning] (
literal[string]
literal[string]
literal[string] )
identifier[vars_of_type] =[ identifier[v] keyword[for] identifier[v] keyword[in] identifier[variables] keyword[if] identifier[v] . identifier[dtype] . identifier[base_dtype] == identifier[dtype] ]
identifier[num_scalars] = identifier[sum] ( identifier[v] . identifier[shape] . identifier[num_elements] () keyword[for] identifier[v] keyword[in] identifier[vars_of_type] )
identifier[results_dict] [ identifier[dtype] ]={
literal[string] : identifier[len] ( identifier[vars_of_type] ),
literal[string] : identifier[num_scalars]
}
keyword[return] identifier[results_dict]
|
def count_variables_by_type(variables=None):
"""Returns a dict mapping dtypes to number of variables and scalars.
Args:
variables: iterable of `tf.Variable`s, or None. If None is passed, then all
global and local variables in the current graph are used.
Returns:
A dict mapping tf.dtype keys to a dict containing the keys 'num_scalars' and
'num_variables'.
"""
if variables is None:
variables = tf.global_variables() + tf.local_variables() # depends on [control=['if'], data=['variables']]
unique_types = set((v.dtype.base_dtype for v in variables))
results_dict = {}
for dtype in unique_types:
if dtype == tf.string:
tf.logging.warning('NB: string Variables present. The memory usage for these Variables will not be accurately computed as it depends on the exact strings stored in a particular session.') # depends on [control=['if'], data=[]]
vars_of_type = [v for v in variables if v.dtype.base_dtype == dtype]
num_scalars = sum((v.shape.num_elements() for v in vars_of_type))
results_dict[dtype] = {'num_variables': len(vars_of_type), 'num_scalars': num_scalars} # depends on [control=['for'], data=['dtype']]
return results_dict
|
def switch_zeros_of_values(self):
"""If we are a of: rule, we can get some 0 in of_values,
if so, change them with NB sons instead
:return: None
"""
nb_sons = len(self.sons)
# Need a list for assignment
new_values = list(self.of_values)
for i in [0, 1, 2]:
if new_values[i] == '0':
new_values[i] = str(nb_sons)
self.of_values = tuple(new_values)
|
def function[switch_zeros_of_values, parameter[self]]:
constant[If we are a of: rule, we can get some 0 in of_values,
if so, change them with NB sons instead
:return: None
]
variable[nb_sons] assign[=] call[name[len], parameter[name[self].sons]]
variable[new_values] assign[=] call[name[list], parameter[name[self].of_values]]
for taget[name[i]] in starred[list[[<ast.Constant object at 0x7da18bc71390>, <ast.Constant object at 0x7da18bc73910>, <ast.Constant object at 0x7da18bc72590>]]] begin[:]
if compare[call[name[new_values]][name[i]] equal[==] constant[0]] begin[:]
call[name[new_values]][name[i]] assign[=] call[name[str], parameter[name[nb_sons]]]
name[self].of_values assign[=] call[name[tuple], parameter[name[new_values]]]
|
keyword[def] identifier[switch_zeros_of_values] ( identifier[self] ):
literal[string]
identifier[nb_sons] = identifier[len] ( identifier[self] . identifier[sons] )
identifier[new_values] = identifier[list] ( identifier[self] . identifier[of_values] )
keyword[for] identifier[i] keyword[in] [ literal[int] , literal[int] , literal[int] ]:
keyword[if] identifier[new_values] [ identifier[i] ]== literal[string] :
identifier[new_values] [ identifier[i] ]= identifier[str] ( identifier[nb_sons] )
identifier[self] . identifier[of_values] = identifier[tuple] ( identifier[new_values] )
|
def switch_zeros_of_values(self):
"""If we are a of: rule, we can get some 0 in of_values,
if so, change them with NB sons instead
:return: None
"""
nb_sons = len(self.sons)
# Need a list for assignment
new_values = list(self.of_values)
for i in [0, 1, 2]:
if new_values[i] == '0':
new_values[i] = str(nb_sons) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
self.of_values = tuple(new_values)
|
def _cdf(self, xloc, left, right, cache):
"""
Cumulative distribution function.
Example:
>>> print(chaospy.Uniform().fwd([-0.5, 0.5, 1.5, 2.5]))
[0. 0.5 1. 1. ]
>>> print(chaospy.Add(chaospy.Uniform(), 1).fwd([-0.5, 0.5, 1.5, 2.5]))
[0. 0. 0.5 1. ]
>>> print(chaospy.Add(1, chaospy.Uniform()).fwd([-0.5, 0.5, 1.5, 2.5]))
[0. 0. 0.5 1. ]
>>> print(chaospy.Add(1, 1).fwd([-0.5, 0.5, 1.5, 2.5]))
[0. 0. 0. 1.]
"""
left = evaluation.get_forward_cache(left, cache)
right = evaluation.get_forward_cache(right, cache)
if isinstance(left, Dist):
if isinstance(right, Dist):
raise evaluation.DependencyError(
"under-defined distribution {} or {}".format(left, right))
elif not isinstance(right, Dist):
return numpy.asfarray(left+right <= xloc)
else:
left, right = right, left
xloc = (xloc.T-numpy.asfarray(right).T).T
output = evaluation.evaluate_forward(left, xloc, cache=cache)
assert output.shape == xloc.shape
return output
|
def function[_cdf, parameter[self, xloc, left, right, cache]]:
constant[
Cumulative distribution function.
Example:
>>> print(chaospy.Uniform().fwd([-0.5, 0.5, 1.5, 2.5]))
[0. 0.5 1. 1. ]
>>> print(chaospy.Add(chaospy.Uniform(), 1).fwd([-0.5, 0.5, 1.5, 2.5]))
[0. 0. 0.5 1. ]
>>> print(chaospy.Add(1, chaospy.Uniform()).fwd([-0.5, 0.5, 1.5, 2.5]))
[0. 0. 0.5 1. ]
>>> print(chaospy.Add(1, 1).fwd([-0.5, 0.5, 1.5, 2.5]))
[0. 0. 0. 1.]
]
variable[left] assign[=] call[name[evaluation].get_forward_cache, parameter[name[left], name[cache]]]
variable[right] assign[=] call[name[evaluation].get_forward_cache, parameter[name[right], name[cache]]]
if call[name[isinstance], parameter[name[left], name[Dist]]] begin[:]
if call[name[isinstance], parameter[name[right], name[Dist]]] begin[:]
<ast.Raise object at 0x7da18f58dc60>
variable[xloc] assign[=] binary_operation[name[xloc].T - call[name[numpy].asfarray, parameter[name[right]]].T].T
variable[output] assign[=] call[name[evaluation].evaluate_forward, parameter[name[left], name[xloc]]]
assert[compare[name[output].shape equal[==] name[xloc].shape]]
return[name[output]]
|
keyword[def] identifier[_cdf] ( identifier[self] , identifier[xloc] , identifier[left] , identifier[right] , identifier[cache] ):
literal[string]
identifier[left] = identifier[evaluation] . identifier[get_forward_cache] ( identifier[left] , identifier[cache] )
identifier[right] = identifier[evaluation] . identifier[get_forward_cache] ( identifier[right] , identifier[cache] )
keyword[if] identifier[isinstance] ( identifier[left] , identifier[Dist] ):
keyword[if] identifier[isinstance] ( identifier[right] , identifier[Dist] ):
keyword[raise] identifier[evaluation] . identifier[DependencyError] (
literal[string] . identifier[format] ( identifier[left] , identifier[right] ))
keyword[elif] keyword[not] identifier[isinstance] ( identifier[right] , identifier[Dist] ):
keyword[return] identifier[numpy] . identifier[asfarray] ( identifier[left] + identifier[right] <= identifier[xloc] )
keyword[else] :
identifier[left] , identifier[right] = identifier[right] , identifier[left]
identifier[xloc] =( identifier[xloc] . identifier[T] - identifier[numpy] . identifier[asfarray] ( identifier[right] ). identifier[T] ). identifier[T]
identifier[output] = identifier[evaluation] . identifier[evaluate_forward] ( identifier[left] , identifier[xloc] , identifier[cache] = identifier[cache] )
keyword[assert] identifier[output] . identifier[shape] == identifier[xloc] . identifier[shape]
keyword[return] identifier[output]
|
def _cdf(self, xloc, left, right, cache):
"""
Cumulative distribution function.
Example:
>>> print(chaospy.Uniform().fwd([-0.5, 0.5, 1.5, 2.5]))
[0. 0.5 1. 1. ]
>>> print(chaospy.Add(chaospy.Uniform(), 1).fwd([-0.5, 0.5, 1.5, 2.5]))
[0. 0. 0.5 1. ]
>>> print(chaospy.Add(1, chaospy.Uniform()).fwd([-0.5, 0.5, 1.5, 2.5]))
[0. 0. 0.5 1. ]
>>> print(chaospy.Add(1, 1).fwd([-0.5, 0.5, 1.5, 2.5]))
[0. 0. 0. 1.]
"""
left = evaluation.get_forward_cache(left, cache)
right = evaluation.get_forward_cache(right, cache)
if isinstance(left, Dist):
if isinstance(right, Dist):
raise evaluation.DependencyError('under-defined distribution {} or {}'.format(left, right)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif not isinstance(right, Dist):
return numpy.asfarray(left + right <= xloc) # depends on [control=['if'], data=[]]
else:
(left, right) = (right, left)
xloc = (xloc.T - numpy.asfarray(right).T).T
output = evaluation.evaluate_forward(left, xloc, cache=cache)
assert output.shape == xloc.shape
return output
|
def regon_checksum(digits):
"""
Calculates and returns a control digit for given list of digits basing on REGON standard.
"""
weights_for_check_digit = [8, 9, 2, 3, 4, 5, 6, 7]
check_digit = 0
for i in range(0, 8):
check_digit += weights_for_check_digit[i] * digits[i]
check_digit %= 11
if check_digit == 10:
check_digit = 0
return check_digit
|
def function[regon_checksum, parameter[digits]]:
constant[
Calculates and returns a control digit for given list of digits basing on REGON standard.
]
variable[weights_for_check_digit] assign[=] list[[<ast.Constant object at 0x7da207f9b220>, <ast.Constant object at 0x7da207f98520>, <ast.Constant object at 0x7da207f9a710>, <ast.Constant object at 0x7da207f9bb20>, <ast.Constant object at 0x7da207f9a080>, <ast.Constant object at 0x7da207f9b400>, <ast.Constant object at 0x7da207f99060>, <ast.Constant object at 0x7da207f982b0>]]
variable[check_digit] assign[=] constant[0]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], constant[8]]]] begin[:]
<ast.AugAssign object at 0x7da207f98100>
<ast.AugAssign object at 0x7da207f9afe0>
if compare[name[check_digit] equal[==] constant[10]] begin[:]
variable[check_digit] assign[=] constant[0]
return[name[check_digit]]
|
keyword[def] identifier[regon_checksum] ( identifier[digits] ):
literal[string]
identifier[weights_for_check_digit] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]
identifier[check_digit] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] ):
identifier[check_digit] += identifier[weights_for_check_digit] [ identifier[i] ]* identifier[digits] [ identifier[i] ]
identifier[check_digit] %= literal[int]
keyword[if] identifier[check_digit] == literal[int] :
identifier[check_digit] = literal[int]
keyword[return] identifier[check_digit]
|
def regon_checksum(digits):
"""
Calculates and returns a control digit for given list of digits basing on REGON standard.
"""
weights_for_check_digit = [8, 9, 2, 3, 4, 5, 6, 7]
check_digit = 0
for i in range(0, 8):
check_digit += weights_for_check_digit[i] * digits[i] # depends on [control=['for'], data=['i']]
check_digit %= 11
if check_digit == 10:
check_digit = 0 # depends on [control=['if'], data=['check_digit']]
return check_digit
|
def get_auth(self, username, password, authoritative_source, auth_options=None):
""" Returns an authentication object.
Examines the auth backend given after the '@' in the username and
returns a suitable instance of a subclass of the BaseAuth class.
* `username` [string]
Username to authenticate as.
* `password` [string]
Password to authenticate with.
* `authoritative_source` [string]
Authoritative source of the query.
* `auth_options` [dict]
A dict which, if authenticated as a trusted user, can override
`username` and `authoritative_source`.
"""
if auth_options is None:
auth_options = {}
# validate arguments
if (authoritative_source is None):
raise AuthError("Missing authoritative_source.")
# remove invalid cache entries
rem = list()
for key in self._auth_cache:
if self._auth_cache[key]['valid_until'] < datetime.utcnow():
rem.append(key)
for key in rem:
del(self._auth_cache[key])
user_authbackend = username.rsplit('@', 1)
# Find out what auth backend to use.
# If no auth backend was specified in username, use default
backend = ""
if len(user_authbackend) == 1:
backend = self._config.get('auth', 'default_backend')
self._logger.debug("Using default auth backend %s" % backend)
else:
backend = user_authbackend[1]
# do we have a cached instance?
auth_str = ( str(username) + str(password) + str(authoritative_source)
+ str(auth_options) )
if auth_str in self._auth_cache:
self._logger.debug('found cached auth object for user %s' % username)
return self._auth_cache[auth_str]['auth_object']
# Create auth object
try:
auth = self._backends[backend](backend, user_authbackend[0], password, authoritative_source, auth_options)
except KeyError:
raise AuthError("Invalid auth backend '%s' specified" %
str(backend))
# save auth object to cache
self._auth_cache[auth_str] = {
'valid_until': datetime.utcnow() + timedelta(seconds=self._config.getint('auth', 'auth_cache_timeout')),
'auth_object': auth
}
return auth
|
def function[get_auth, parameter[self, username, password, authoritative_source, auth_options]]:
constant[ Returns an authentication object.
Examines the auth backend given after the '@' in the username and
returns a suitable instance of a subclass of the BaseAuth class.
* `username` [string]
Username to authenticate as.
* `password` [string]
Password to authenticate with.
* `authoritative_source` [string]
Authoritative source of the query.
* `auth_options` [dict]
A dict which, if authenticated as a trusted user, can override
`username` and `authoritative_source`.
]
if compare[name[auth_options] is constant[None]] begin[:]
variable[auth_options] assign[=] dictionary[[], []]
if compare[name[authoritative_source] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b2344820>
variable[rem] assign[=] call[name[list], parameter[]]
for taget[name[key]] in starred[name[self]._auth_cache] begin[:]
if compare[call[call[name[self]._auth_cache][name[key]]][constant[valid_until]] less[<] call[name[datetime].utcnow, parameter[]]] begin[:]
call[name[rem].append, parameter[name[key]]]
for taget[name[key]] in starred[name[rem]] begin[:]
<ast.Delete object at 0x7da1b2345c30>
variable[user_authbackend] assign[=] call[name[username].rsplit, parameter[constant[@], constant[1]]]
variable[backend] assign[=] constant[]
if compare[call[name[len], parameter[name[user_authbackend]]] equal[==] constant[1]] begin[:]
variable[backend] assign[=] call[name[self]._config.get, parameter[constant[auth], constant[default_backend]]]
call[name[self]._logger.debug, parameter[binary_operation[constant[Using default auth backend %s] <ast.Mod object at 0x7da2590d6920> name[backend]]]]
variable[auth_str] assign[=] binary_operation[binary_operation[binary_operation[call[name[str], parameter[name[username]]] + call[name[str], parameter[name[password]]]] + call[name[str], parameter[name[authoritative_source]]]] + call[name[str], parameter[name[auth_options]]]]
if compare[name[auth_str] in name[self]._auth_cache] begin[:]
call[name[self]._logger.debug, parameter[binary_operation[constant[found cached auth object for user %s] <ast.Mod object at 0x7da2590d6920> name[username]]]]
return[call[call[name[self]._auth_cache][name[auth_str]]][constant[auth_object]]]
<ast.Try object at 0x7da1b23450c0>
call[name[self]._auth_cache][name[auth_str]] assign[=] dictionary[[<ast.Constant object at 0x7da18dc04ca0>, <ast.Constant object at 0x7da18dc05f00>], [<ast.BinOp object at 0x7da18dc072b0>, <ast.Name object at 0x7da18dc074f0>]]
return[name[auth]]
|
keyword[def] identifier[get_auth] ( identifier[self] , identifier[username] , identifier[password] , identifier[authoritative_source] , identifier[auth_options] = keyword[None] ):
literal[string]
keyword[if] identifier[auth_options] keyword[is] keyword[None] :
identifier[auth_options] ={}
keyword[if] ( identifier[authoritative_source] keyword[is] keyword[None] ):
keyword[raise] identifier[AuthError] ( literal[string] )
identifier[rem] = identifier[list] ()
keyword[for] identifier[key] keyword[in] identifier[self] . identifier[_auth_cache] :
keyword[if] identifier[self] . identifier[_auth_cache] [ identifier[key] ][ literal[string] ]< identifier[datetime] . identifier[utcnow] ():
identifier[rem] . identifier[append] ( identifier[key] )
keyword[for] identifier[key] keyword[in] identifier[rem] :
keyword[del] ( identifier[self] . identifier[_auth_cache] [ identifier[key] ])
identifier[user_authbackend] = identifier[username] . identifier[rsplit] ( literal[string] , literal[int] )
identifier[backend] = literal[string]
keyword[if] identifier[len] ( identifier[user_authbackend] )== literal[int] :
identifier[backend] = identifier[self] . identifier[_config] . identifier[get] ( literal[string] , literal[string] )
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] % identifier[backend] )
keyword[else] :
identifier[backend] = identifier[user_authbackend] [ literal[int] ]
identifier[auth_str] =( identifier[str] ( identifier[username] )+ identifier[str] ( identifier[password] )+ identifier[str] ( identifier[authoritative_source] )
+ identifier[str] ( identifier[auth_options] ))
keyword[if] identifier[auth_str] keyword[in] identifier[self] . identifier[_auth_cache] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] % identifier[username] )
keyword[return] identifier[self] . identifier[_auth_cache] [ identifier[auth_str] ][ literal[string] ]
keyword[try] :
identifier[auth] = identifier[self] . identifier[_backends] [ identifier[backend] ]( identifier[backend] , identifier[user_authbackend] [ literal[int] ], identifier[password] , identifier[authoritative_source] , identifier[auth_options] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[AuthError] ( literal[string] %
identifier[str] ( identifier[backend] ))
identifier[self] . identifier[_auth_cache] [ identifier[auth_str] ]={
literal[string] : identifier[datetime] . identifier[utcnow] ()+ identifier[timedelta] ( identifier[seconds] = identifier[self] . identifier[_config] . identifier[getint] ( literal[string] , literal[string] )),
literal[string] : identifier[auth]
}
keyword[return] identifier[auth]
|
def get_auth(self, username, password, authoritative_source, auth_options=None):
""" Returns an authentication object.
Examines the auth backend given after the '@' in the username and
returns a suitable instance of a subclass of the BaseAuth class.
* `username` [string]
Username to authenticate as.
* `password` [string]
Password to authenticate with.
* `authoritative_source` [string]
Authoritative source of the query.
* `auth_options` [dict]
A dict which, if authenticated as a trusted user, can override
`username` and `authoritative_source`.
"""
if auth_options is None:
auth_options = {} # depends on [control=['if'], data=['auth_options']]
# validate arguments
if authoritative_source is None:
raise AuthError('Missing authoritative_source.') # depends on [control=['if'], data=[]]
# remove invalid cache entries
rem = list()
for key in self._auth_cache:
if self._auth_cache[key]['valid_until'] < datetime.utcnow():
rem.append(key) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
for key in rem:
del self._auth_cache[key] # depends on [control=['for'], data=['key']]
user_authbackend = username.rsplit('@', 1)
# Find out what auth backend to use.
# If no auth backend was specified in username, use default
backend = ''
if len(user_authbackend) == 1:
backend = self._config.get('auth', 'default_backend')
self._logger.debug('Using default auth backend %s' % backend) # depends on [control=['if'], data=[]]
else:
backend = user_authbackend[1]
# do we have a cached instance?
auth_str = str(username) + str(password) + str(authoritative_source) + str(auth_options)
if auth_str in self._auth_cache:
self._logger.debug('found cached auth object for user %s' % username)
return self._auth_cache[auth_str]['auth_object'] # depends on [control=['if'], data=['auth_str']]
# Create auth object
try:
auth = self._backends[backend](backend, user_authbackend[0], password, authoritative_source, auth_options) # depends on [control=['try'], data=[]]
except KeyError:
raise AuthError("Invalid auth backend '%s' specified" % str(backend)) # depends on [control=['except'], data=[]]
# save auth object to cache
self._auth_cache[auth_str] = {'valid_until': datetime.utcnow() + timedelta(seconds=self._config.getint('auth', 'auth_cache_timeout')), 'auth_object': auth}
return auth
|
def stn(s, length, encoding, errors):
"""Convert a string to a null-terminated bytes object.
"""
s = s.encode(encoding, errors)
return s[:length] + (length - len(s)) * NUL
|
def function[stn, parameter[s, length, encoding, errors]]:
constant[Convert a string to a null-terminated bytes object.
]
variable[s] assign[=] call[name[s].encode, parameter[name[encoding], name[errors]]]
return[binary_operation[call[name[s]][<ast.Slice object at 0x7da1b2067670>] + binary_operation[binary_operation[name[length] - call[name[len], parameter[name[s]]]] * name[NUL]]]]
|
keyword[def] identifier[stn] ( identifier[s] , identifier[length] , identifier[encoding] , identifier[errors] ):
literal[string]
identifier[s] = identifier[s] . identifier[encode] ( identifier[encoding] , identifier[errors] )
keyword[return] identifier[s] [: identifier[length] ]+( identifier[length] - identifier[len] ( identifier[s] ))* identifier[NUL]
|
def stn(s, length, encoding, errors):
"""Convert a string to a null-terminated bytes object.
"""
s = s.encode(encoding, errors)
return s[:length] + (length - len(s)) * NUL
|
def adj_nodes_gcp(gcp_nodes):
"""Adjust details specific to GCP."""
for node in gcp_nodes:
node.cloud = "gcp"
node.cloud_disp = "GCP"
node.private_ips = ip_to_str(node.private_ips)
node.public_ips = ip_to_str(node.public_ips)
node.zone = node.extra['zone'].name
return gcp_nodes
|
def function[adj_nodes_gcp, parameter[gcp_nodes]]:
constant[Adjust details specific to GCP.]
for taget[name[node]] in starred[name[gcp_nodes]] begin[:]
name[node].cloud assign[=] constant[gcp]
name[node].cloud_disp assign[=] constant[GCP]
name[node].private_ips assign[=] call[name[ip_to_str], parameter[name[node].private_ips]]
name[node].public_ips assign[=] call[name[ip_to_str], parameter[name[node].public_ips]]
name[node].zone assign[=] call[name[node].extra][constant[zone]].name
return[name[gcp_nodes]]
|
keyword[def] identifier[adj_nodes_gcp] ( identifier[gcp_nodes] ):
literal[string]
keyword[for] identifier[node] keyword[in] identifier[gcp_nodes] :
identifier[node] . identifier[cloud] = literal[string]
identifier[node] . identifier[cloud_disp] = literal[string]
identifier[node] . identifier[private_ips] = identifier[ip_to_str] ( identifier[node] . identifier[private_ips] )
identifier[node] . identifier[public_ips] = identifier[ip_to_str] ( identifier[node] . identifier[public_ips] )
identifier[node] . identifier[zone] = identifier[node] . identifier[extra] [ literal[string] ]. identifier[name]
keyword[return] identifier[gcp_nodes]
|
def adj_nodes_gcp(gcp_nodes):
"""Adjust details specific to GCP."""
for node in gcp_nodes:
node.cloud = 'gcp'
node.cloud_disp = 'GCP'
node.private_ips = ip_to_str(node.private_ips)
node.public_ips = ip_to_str(node.public_ips)
node.zone = node.extra['zone'].name # depends on [control=['for'], data=['node']]
return gcp_nodes
|
def redirect(self, url, method=None, **kwargs):
"""
Create a <Redirect> element
:param url: Redirect URL
:param method: Redirect URL method
:param kwargs: additional attributes
:returns: <Redirect> element
"""
return self.nest(Redirect(url, method=method, **kwargs))
|
def function[redirect, parameter[self, url, method]]:
constant[
Create a <Redirect> element
:param url: Redirect URL
:param method: Redirect URL method
:param kwargs: additional attributes
:returns: <Redirect> element
]
return[call[name[self].nest, parameter[call[name[Redirect], parameter[name[url]]]]]]
|
keyword[def] identifier[redirect] ( identifier[self] , identifier[url] , identifier[method] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[nest] ( identifier[Redirect] ( identifier[url] , identifier[method] = identifier[method] ,** identifier[kwargs] ))
|
def redirect(self, url, method=None, **kwargs):
"""
Create a <Redirect> element
:param url: Redirect URL
:param method: Redirect URL method
:param kwargs: additional attributes
:returns: <Redirect> element
"""
return self.nest(Redirect(url, method=method, **kwargs))
|
def getThirdPartyLibCompilerFlags(self, libs):
"""
Retrieves the compiler flags for building against the Unreal-bundled versions of the specified third-party libraries
"""
fmt = PrintingFormat.singleLine()
if libs[0] == '--multiline':
fmt = PrintingFormat.multiLine()
libs = libs[1:]
platformDefaults = True
if libs[0] == '--nodefaults':
platformDefaults = False
libs = libs[1:]
details = self.getThirdpartyLibs(libs, includePlatformDefaults=platformDefaults)
return details.getCompilerFlags(self.getEngineRoot(), fmt)
|
def function[getThirdPartyLibCompilerFlags, parameter[self, libs]]:
constant[
Retrieves the compiler flags for building against the Unreal-bundled versions of the specified third-party libraries
]
variable[fmt] assign[=] call[name[PrintingFormat].singleLine, parameter[]]
if compare[call[name[libs]][constant[0]] equal[==] constant[--multiline]] begin[:]
variable[fmt] assign[=] call[name[PrintingFormat].multiLine, parameter[]]
variable[libs] assign[=] call[name[libs]][<ast.Slice object at 0x7da18f00ded0>]
variable[platformDefaults] assign[=] constant[True]
if compare[call[name[libs]][constant[0]] equal[==] constant[--nodefaults]] begin[:]
variable[platformDefaults] assign[=] constant[False]
variable[libs] assign[=] call[name[libs]][<ast.Slice object at 0x7da18f00e290>]
variable[details] assign[=] call[name[self].getThirdpartyLibs, parameter[name[libs]]]
return[call[name[details].getCompilerFlags, parameter[call[name[self].getEngineRoot, parameter[]], name[fmt]]]]
|
keyword[def] identifier[getThirdPartyLibCompilerFlags] ( identifier[self] , identifier[libs] ):
literal[string]
identifier[fmt] = identifier[PrintingFormat] . identifier[singleLine] ()
keyword[if] identifier[libs] [ literal[int] ]== literal[string] :
identifier[fmt] = identifier[PrintingFormat] . identifier[multiLine] ()
identifier[libs] = identifier[libs] [ literal[int] :]
identifier[platformDefaults] = keyword[True]
keyword[if] identifier[libs] [ literal[int] ]== literal[string] :
identifier[platformDefaults] = keyword[False]
identifier[libs] = identifier[libs] [ literal[int] :]
identifier[details] = identifier[self] . identifier[getThirdpartyLibs] ( identifier[libs] , identifier[includePlatformDefaults] = identifier[platformDefaults] )
keyword[return] identifier[details] . identifier[getCompilerFlags] ( identifier[self] . identifier[getEngineRoot] (), identifier[fmt] )
|
def getThirdPartyLibCompilerFlags(self, libs):
"""
Retrieves the compiler flags for building against the Unreal-bundled versions of the specified third-party libraries
"""
fmt = PrintingFormat.singleLine()
if libs[0] == '--multiline':
fmt = PrintingFormat.multiLine()
libs = libs[1:] # depends on [control=['if'], data=[]]
platformDefaults = True
if libs[0] == '--nodefaults':
platformDefaults = False
libs = libs[1:] # depends on [control=['if'], data=[]]
details = self.getThirdpartyLibs(libs, includePlatformDefaults=platformDefaults)
return details.getCompilerFlags(self.getEngineRoot(), fmt)
|
def _broadcast_transport_message(self, origin, message):
"""
Broadcasts an event originating from a transport that does not represent a message from the Pebble.
:param origin: The type of transport responsible for the message.
:type origin: .MessageTarget
:param message: The message from the transport
"""
self.event_handler.broadcast_event((_EventType.Transport, type(origin), type(message)), message)
|
def function[_broadcast_transport_message, parameter[self, origin, message]]:
constant[
Broadcasts an event originating from a transport that does not represent a message from the Pebble.
:param origin: The type of transport responsible for the message.
:type origin: .MessageTarget
:param message: The message from the transport
]
call[name[self].event_handler.broadcast_event, parameter[tuple[[<ast.Attribute object at 0x7da1b0b652a0>, <ast.Call object at 0x7da1b0c96b90>, <ast.Call object at 0x7da1b0c96710>]], name[message]]]
|
keyword[def] identifier[_broadcast_transport_message] ( identifier[self] , identifier[origin] , identifier[message] ):
literal[string]
identifier[self] . identifier[event_handler] . identifier[broadcast_event] (( identifier[_EventType] . identifier[Transport] , identifier[type] ( identifier[origin] ), identifier[type] ( identifier[message] )), identifier[message] )
|
def _broadcast_transport_message(self, origin, message):
"""
Broadcasts an event originating from a transport that does not represent a message from the Pebble.
:param origin: The type of transport responsible for the message.
:type origin: .MessageTarget
:param message: The message from the transport
"""
self.event_handler.broadcast_event((_EventType.Transport, type(origin), type(message)), message)
|
def save_object(fname, obj):
"""Pickle a Python object"""
fd = gzip.open(fname, "wb")
six.moves.cPickle.dump(obj, fd)
fd.close()
|
def function[save_object, parameter[fname, obj]]:
constant[Pickle a Python object]
variable[fd] assign[=] call[name[gzip].open, parameter[name[fname], constant[wb]]]
call[name[six].moves.cPickle.dump, parameter[name[obj], name[fd]]]
call[name[fd].close, parameter[]]
|
keyword[def] identifier[save_object] ( identifier[fname] , identifier[obj] ):
literal[string]
identifier[fd] = identifier[gzip] . identifier[open] ( identifier[fname] , literal[string] )
identifier[six] . identifier[moves] . identifier[cPickle] . identifier[dump] ( identifier[obj] , identifier[fd] )
identifier[fd] . identifier[close] ()
|
def save_object(fname, obj):
"""Pickle a Python object"""
fd = gzip.open(fname, 'wb')
six.moves.cPickle.dump(obj, fd)
fd.close()
|
def run(self):
"""
Continuously retrieve client requests until given "stop" request.
"""
while True:
self._logger.debug('Accepting connection')
conn, addr = self._sock.accept()
self._talk = SocketTalk(conn, encode=self._encode)
self._logger.debug('Receiving action')
action = self._receive()
if self._on_action:
self._on_action(action)
key = None
if action in ('set', 'get', 'del',):
self._logger.debug('Receiving key')
key = self._receive()
value = None
if action in ('set',):
self._logger.debug('Receiving value')
value = self._receive()
# Process the request.
if action == 'stop':
self._send('ok')
elif action == 'set':
self._data[key] = value
self._send('ok')
elif action == 'get':
try:
value = self._data[key]
except:
self._logger.debug('Sending "key not found"')
self._send('key not found')
else:
self._logger.debug('Sending "ok"')
self._send('ok')
self._logger.debug('Sending value')
self._send(value)
elif action == 'del':
try:
del self._data[key]
except:
self._logger.debug('Sending "key not found"')
self._send('key not found')
else:
self._logger.debug('Sending "ok"')
self._send('ok')
elif action == 'size':
self._send('ok')
self._send(str(len(self._data)))
elif action == 'keys':
pickled = pickle.dumps(self._data.keys())
self._send('ok')
self._send(pickled)
else:
self._send('unknown action %s' % action)
self._logger.debug('Closing')
try:
conn.shutdown(socket.SHUT_RDWR)
except:
self._logger.error('Failed to shutdown')
conn.close()
if action == 'stop':
break
self._logger.debug('Stopped')
|
def function[run, parameter[self]]:
constant[
Continuously retrieve client requests until given "stop" request.
]
while constant[True] begin[:]
call[name[self]._logger.debug, parameter[constant[Accepting connection]]]
<ast.Tuple object at 0x7da18bcc8e80> assign[=] call[name[self]._sock.accept, parameter[]]
name[self]._talk assign[=] call[name[SocketTalk], parameter[name[conn]]]
call[name[self]._logger.debug, parameter[constant[Receiving action]]]
variable[action] assign[=] call[name[self]._receive, parameter[]]
if name[self]._on_action begin[:]
call[name[self]._on_action, parameter[name[action]]]
variable[key] assign[=] constant[None]
if compare[name[action] in tuple[[<ast.Constant object at 0x7da18bcc90c0>, <ast.Constant object at 0x7da18bcc9ff0>, <ast.Constant object at 0x7da18bcca1a0>]]] begin[:]
call[name[self]._logger.debug, parameter[constant[Receiving key]]]
variable[key] assign[=] call[name[self]._receive, parameter[]]
variable[value] assign[=] constant[None]
if compare[name[action] in tuple[[<ast.Constant object at 0x7da18bcc8370>]]] begin[:]
call[name[self]._logger.debug, parameter[constant[Receiving value]]]
variable[value] assign[=] call[name[self]._receive, parameter[]]
if compare[name[action] equal[==] constant[stop]] begin[:]
call[name[self]._send, parameter[constant[ok]]]
call[name[self]._logger.debug, parameter[constant[Closing]]]
<ast.Try object at 0x7da18dc981c0>
call[name[conn].close, parameter[]]
if compare[name[action] equal[==] constant[stop]] begin[:]
break
call[name[self]._logger.debug, parameter[constant[Stopped]]]
|
keyword[def] identifier[run] ( identifier[self] ):
literal[string]
keyword[while] keyword[True] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] )
identifier[conn] , identifier[addr] = identifier[self] . identifier[_sock] . identifier[accept] ()
identifier[self] . identifier[_talk] = identifier[SocketTalk] ( identifier[conn] , identifier[encode] = identifier[self] . identifier[_encode] )
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] )
identifier[action] = identifier[self] . identifier[_receive] ()
keyword[if] identifier[self] . identifier[_on_action] :
identifier[self] . identifier[_on_action] ( identifier[action] )
identifier[key] = keyword[None]
keyword[if] identifier[action] keyword[in] ( literal[string] , literal[string] , literal[string] ,):
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] )
identifier[key] = identifier[self] . identifier[_receive] ()
identifier[value] = keyword[None]
keyword[if] identifier[action] keyword[in] ( literal[string] ,):
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] )
identifier[value] = identifier[self] . identifier[_receive] ()
keyword[if] identifier[action] == literal[string] :
identifier[self] . identifier[_send] ( literal[string] )
keyword[elif] identifier[action] == literal[string] :
identifier[self] . identifier[_data] [ identifier[key] ]= identifier[value]
identifier[self] . identifier[_send] ( literal[string] )
keyword[elif] identifier[action] == literal[string] :
keyword[try] :
identifier[value] = identifier[self] . identifier[_data] [ identifier[key] ]
keyword[except] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_send] ( literal[string] )
keyword[else] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_send] ( literal[string] )
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_send] ( identifier[value] )
keyword[elif] identifier[action] == literal[string] :
keyword[try] :
keyword[del] identifier[self] . identifier[_data] [ identifier[key] ]
keyword[except] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_send] ( literal[string] )
keyword[else] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_send] ( literal[string] )
keyword[elif] identifier[action] == literal[string] :
identifier[self] . identifier[_send] ( literal[string] )
identifier[self] . identifier[_send] ( identifier[str] ( identifier[len] ( identifier[self] . identifier[_data] )))
keyword[elif] identifier[action] == literal[string] :
identifier[pickled] = identifier[pickle] . identifier[dumps] ( identifier[self] . identifier[_data] . identifier[keys] ())
identifier[self] . identifier[_send] ( literal[string] )
identifier[self] . identifier[_send] ( identifier[pickled] )
keyword[else] :
identifier[self] . identifier[_send] ( literal[string] % identifier[action] )
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] )
keyword[try] :
identifier[conn] . identifier[shutdown] ( identifier[socket] . identifier[SHUT_RDWR] )
keyword[except] :
identifier[self] . identifier[_logger] . identifier[error] ( literal[string] )
identifier[conn] . identifier[close] ()
keyword[if] identifier[action] == literal[string] :
keyword[break]
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] )
|
def run(self):
"""
Continuously retrieve client requests until given "stop" request.
"""
while True:
self._logger.debug('Accepting connection')
(conn, addr) = self._sock.accept()
self._talk = SocketTalk(conn, encode=self._encode)
self._logger.debug('Receiving action')
action = self._receive()
if self._on_action:
self._on_action(action) # depends on [control=['if'], data=[]]
key = None
if action in ('set', 'get', 'del'):
self._logger.debug('Receiving key')
key = self._receive() # depends on [control=['if'], data=[]]
value = None
if action in ('set',):
self._logger.debug('Receiving value')
value = self._receive() # depends on [control=['if'], data=[]]
# Process the request.
if action == 'stop':
self._send('ok') # depends on [control=['if'], data=[]]
elif action == 'set':
self._data[key] = value
self._send('ok') # depends on [control=['if'], data=[]]
elif action == 'get':
try:
value = self._data[key] # depends on [control=['try'], data=[]]
except:
self._logger.debug('Sending "key not found"')
self._send('key not found') # depends on [control=['except'], data=[]]
else:
self._logger.debug('Sending "ok"')
self._send('ok')
self._logger.debug('Sending value')
self._send(value) # depends on [control=['if'], data=[]]
elif action == 'del':
try:
del self._data[key] # depends on [control=['try'], data=[]]
except:
self._logger.debug('Sending "key not found"')
self._send('key not found') # depends on [control=['except'], data=[]]
else:
self._logger.debug('Sending "ok"')
self._send('ok') # depends on [control=['if'], data=[]]
elif action == 'size':
self._send('ok')
self._send(str(len(self._data))) # depends on [control=['if'], data=[]]
elif action == 'keys':
pickled = pickle.dumps(self._data.keys())
self._send('ok')
self._send(pickled) # depends on [control=['if'], data=[]]
else:
self._send('unknown action %s' % action)
self._logger.debug('Closing')
try:
conn.shutdown(socket.SHUT_RDWR) # depends on [control=['try'], data=[]]
except:
self._logger.error('Failed to shutdown') # depends on [control=['except'], data=[]]
conn.close()
if action == 'stop':
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
self._logger.debug('Stopped')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.