code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def opt_separator(self) -> bool:
"""Parse an optional separator and return ``True`` if found.
Raises:
EndOfInput: If past the end of input.
"""
start = self.offset
self.dfa([
{ # state 0: whitespace
"": lambda: -1,
" ": lambda: 0,
"\t": lambda: 0,
"\n": lambda: 0,
"\r": lambda: 1,
"/": lambda: 2
},
{ # state 1: CR/LF?
"": self._back_break,
"\n": lambda: 0
},
{ # state 2: start comment?
"": self._back_break,
"/": lambda: 3,
"*": lambda: 4
},
{ # state 3: line comment
"": lambda: 3,
"\n": lambda: 0
},
{ # state 4: block comment
"": lambda: 4,
"*": lambda: 5
},
{ # state 5: end block comment?
"": lambda: 4,
"/": lambda: 0,
"*": lambda: 5
}])
return start < self.offset | def function[opt_separator, parameter[self]]:
constant[Parse an optional separator and return ``True`` if found.
Raises:
EndOfInput: If past the end of input.
]
variable[start] assign[=] name[self].offset
call[name[self].dfa, parameter[list[[<ast.Dict object at 0x7da1b02e6230>, <ast.Dict object at 0x7da1b02e4f70>, <ast.Dict object at 0x7da1b02e4310>, <ast.Dict object at 0x7da1b02e7f10>, <ast.Dict object at 0x7da1b02e5fc0>, <ast.Dict object at 0x7da1b02e7b80>]]]]
return[compare[name[start] less[<] name[self].offset]] | keyword[def] identifier[opt_separator] ( identifier[self] )-> identifier[bool] :
literal[string]
identifier[start] = identifier[self] . identifier[offset]
identifier[self] . identifier[dfa] ([
{
literal[string] : keyword[lambda] :- literal[int] ,
literal[string] : keyword[lambda] : literal[int] ,
literal[string] : keyword[lambda] : literal[int] ,
literal[string] : keyword[lambda] : literal[int] ,
literal[string] : keyword[lambda] : literal[int] ,
literal[string] : keyword[lambda] : literal[int]
},
{
literal[string] : identifier[self] . identifier[_back_break] ,
literal[string] : keyword[lambda] : literal[int]
},
{
literal[string] : identifier[self] . identifier[_back_break] ,
literal[string] : keyword[lambda] : literal[int] ,
literal[string] : keyword[lambda] : literal[int]
},
{
literal[string] : keyword[lambda] : literal[int] ,
literal[string] : keyword[lambda] : literal[int]
},
{
literal[string] : keyword[lambda] : literal[int] ,
literal[string] : keyword[lambda] : literal[int]
},
{
literal[string] : keyword[lambda] : literal[int] ,
literal[string] : keyword[lambda] : literal[int] ,
literal[string] : keyword[lambda] : literal[int]
}])
keyword[return] identifier[start] < identifier[self] . identifier[offset] | def opt_separator(self) -> bool:
"""Parse an optional separator and return ``True`` if found.
Raises:
EndOfInput: If past the end of input.
"""
start = self.offset # state 0: whitespace
# state 1: CR/LF?
# state 2: start comment?
# state 3: line comment
# state 4: block comment
# state 5: end block comment?
self.dfa([{'': lambda : -1, ' ': lambda : 0, '\t': lambda : 0, '\n': lambda : 0, '\r': lambda : 1, '/': lambda : 2}, {'': self._back_break, '\n': lambda : 0}, {'': self._back_break, '/': lambda : 3, '*': lambda : 4}, {'': lambda : 3, '\n': lambda : 0}, {'': lambda : 4, '*': lambda : 5}, {'': lambda : 4, '/': lambda : 0, '*': lambda : 5}])
return start < self.offset |
def json_repr(obj, curr_depth=0, max_depth=4):
"""Represent instance of a class as JSON.
Arguments:
obj -- any object
Return:
String that represent JSON-encoded object.
"""
def serialize(obj, curr_depth):
"""Recursively walk object's hierarchy. Limit to max_depth"""
if curr_depth>max_depth:
return
if isinstance(obj, (bool, int, long, float, basestring)):
return obj
elif isinstance(obj, dict):
obj = obj.copy()
for key in obj:
obj[key] = serialize(obj[key], curr_depth+1)
return obj
elif isinstance(obj, list):
return [serialize(item, curr_depth+1) for item in obj]
elif isinstance(obj, tuple):
return tuple(serialize([item for item in obj], curr_depth+1))
elif hasattr(obj, '__dict__'):
return serialize(obj.__dict__, curr_depth+1)
else:
return repr(obj) # Don't know how to handle, convert to string
return (serialize(obj, curr_depth+1)) | def function[json_repr, parameter[obj, curr_depth, max_depth]]:
constant[Represent instance of a class as JSON.
Arguments:
obj -- any object
Return:
String that represent JSON-encoded object.
]
def function[serialize, parameter[obj, curr_depth]]:
constant[Recursively walk object's hierarchy. Limit to max_depth]
if compare[name[curr_depth] greater[>] name[max_depth]] begin[:]
return[None]
if call[name[isinstance], parameter[name[obj], tuple[[<ast.Name object at 0x7da18fe93d00>, <ast.Name object at 0x7da18fe90460>, <ast.Name object at 0x7da18fe93400>, <ast.Name object at 0x7da18fe93700>, <ast.Name object at 0x7da18fe90a60>]]]] begin[:]
return[name[obj]]
return[call[name[serialize], parameter[name[obj], binary_operation[name[curr_depth] + constant[1]]]]] | keyword[def] identifier[json_repr] ( identifier[obj] , identifier[curr_depth] = literal[int] , identifier[max_depth] = literal[int] ):
literal[string]
keyword[def] identifier[serialize] ( identifier[obj] , identifier[curr_depth] ):
literal[string]
keyword[if] identifier[curr_depth] > identifier[max_depth] :
keyword[return]
keyword[if] identifier[isinstance] ( identifier[obj] ,( identifier[bool] , identifier[int] , identifier[long] , identifier[float] , identifier[basestring] )):
keyword[return] identifier[obj]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[dict] ):
identifier[obj] = identifier[obj] . identifier[copy] ()
keyword[for] identifier[key] keyword[in] identifier[obj] :
identifier[obj] [ identifier[key] ]= identifier[serialize] ( identifier[obj] [ identifier[key] ], identifier[curr_depth] + literal[int] )
keyword[return] identifier[obj]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[list] ):
keyword[return] [ identifier[serialize] ( identifier[item] , identifier[curr_depth] + literal[int] ) keyword[for] identifier[item] keyword[in] identifier[obj] ]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[tuple] ):
keyword[return] identifier[tuple] ( identifier[serialize] ([ identifier[item] keyword[for] identifier[item] keyword[in] identifier[obj] ], identifier[curr_depth] + literal[int] ))
keyword[elif] identifier[hasattr] ( identifier[obj] , literal[string] ):
keyword[return] identifier[serialize] ( identifier[obj] . identifier[__dict__] , identifier[curr_depth] + literal[int] )
keyword[else] :
keyword[return] identifier[repr] ( identifier[obj] )
keyword[return] ( identifier[serialize] ( identifier[obj] , identifier[curr_depth] + literal[int] )) | def json_repr(obj, curr_depth=0, max_depth=4):
"""Represent instance of a class as JSON.
Arguments:
obj -- any object
Return:
String that represent JSON-encoded object.
"""
def serialize(obj, curr_depth):
"""Recursively walk object's hierarchy. Limit to max_depth"""
if curr_depth > max_depth:
return # depends on [control=['if'], data=[]]
if isinstance(obj, (bool, int, long, float, basestring)):
return obj # depends on [control=['if'], data=[]]
elif isinstance(obj, dict):
obj = obj.copy()
for key in obj:
obj[key] = serialize(obj[key], curr_depth + 1) # depends on [control=['for'], data=['key']]
return obj # depends on [control=['if'], data=[]]
elif isinstance(obj, list):
return [serialize(item, curr_depth + 1) for item in obj] # depends on [control=['if'], data=[]]
elif isinstance(obj, tuple):
return tuple(serialize([item for item in obj], curr_depth + 1)) # depends on [control=['if'], data=[]]
elif hasattr(obj, '__dict__'):
return serialize(obj.__dict__, curr_depth + 1) # depends on [control=['if'], data=[]]
else:
return repr(obj) # Don't know how to handle, convert to string
return serialize(obj, curr_depth + 1) |
def LCHab_to_Lab(cobj, *args, **kwargs):
"""
Convert from LCH(ab) to Lab.
"""
lab_l = cobj.lch_l
lab_a = math.cos(math.radians(cobj.lch_h)) * cobj.lch_c
lab_b = math.sin(math.radians(cobj.lch_h)) * cobj.lch_c
return LabColor(
lab_l, lab_a, lab_b, illuminant=cobj.illuminant, observer=cobj.observer) | def function[LCHab_to_Lab, parameter[cobj]]:
constant[
Convert from LCH(ab) to Lab.
]
variable[lab_l] assign[=] name[cobj].lch_l
variable[lab_a] assign[=] binary_operation[call[name[math].cos, parameter[call[name[math].radians, parameter[name[cobj].lch_h]]]] * name[cobj].lch_c]
variable[lab_b] assign[=] binary_operation[call[name[math].sin, parameter[call[name[math].radians, parameter[name[cobj].lch_h]]]] * name[cobj].lch_c]
return[call[name[LabColor], parameter[name[lab_l], name[lab_a], name[lab_b]]]] | keyword[def] identifier[LCHab_to_Lab] ( identifier[cobj] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[lab_l] = identifier[cobj] . identifier[lch_l]
identifier[lab_a] = identifier[math] . identifier[cos] ( identifier[math] . identifier[radians] ( identifier[cobj] . identifier[lch_h] ))* identifier[cobj] . identifier[lch_c]
identifier[lab_b] = identifier[math] . identifier[sin] ( identifier[math] . identifier[radians] ( identifier[cobj] . identifier[lch_h] ))* identifier[cobj] . identifier[lch_c]
keyword[return] identifier[LabColor] (
identifier[lab_l] , identifier[lab_a] , identifier[lab_b] , identifier[illuminant] = identifier[cobj] . identifier[illuminant] , identifier[observer] = identifier[cobj] . identifier[observer] ) | def LCHab_to_Lab(cobj, *args, **kwargs):
"""
Convert from LCH(ab) to Lab.
"""
lab_l = cobj.lch_l
lab_a = math.cos(math.radians(cobj.lch_h)) * cobj.lch_c
lab_b = math.sin(math.radians(cobj.lch_h)) * cobj.lch_c
return LabColor(lab_l, lab_a, lab_b, illuminant=cobj.illuminant, observer=cobj.observer) |
def fix_post_relative_url(rel_url):
"""
Fix post relative url to a standard, uniform format.
Possible input:
- 2016/7/8/my-post
- 2016/07/08/my-post.html
- 2016/8/09/my-post/
- 2016/8/09/my-post/index
- 2016/8/09/my-post/index.htm
- 2016/8/09/my-post/index.html
:param rel_url: relative url to fix
:return: fixed relative url, or None if cannot recognize
"""
m = re.match(
r'^(?P<year>\d{4})/(?P<month>\d{1,2})/(?P<day>\d{1,2})/'
r'(?P<post_name>[^/]+?)'
r'(?:(?:\.html)|(?:/(?P<index>index(?:\.html?)?)?))?$',
rel_url
)
if not m:
return None
year, month, day, post_name = m.groups()[:4]
try:
d = date(year=int(year), month=int(month), day=int(day))
return '/'.join((d.strftime('%Y/%m/%d'), post_name,
'index.html' if m.group('index') else ''))
except (TypeError, ValueError):
# the date is invalid
return None | def function[fix_post_relative_url, parameter[rel_url]]:
constant[
Fix post relative url to a standard, uniform format.
Possible input:
- 2016/7/8/my-post
- 2016/07/08/my-post.html
- 2016/8/09/my-post/
- 2016/8/09/my-post/index
- 2016/8/09/my-post/index.htm
- 2016/8/09/my-post/index.html
:param rel_url: relative url to fix
:return: fixed relative url, or None if cannot recognize
]
variable[m] assign[=] call[name[re].match, parameter[constant[^(?P<year>\d{4})/(?P<month>\d{1,2})/(?P<day>\d{1,2})/(?P<post_name>[^/]+?)(?:(?:\.html)|(?:/(?P<index>index(?:\.html?)?)?))?$], name[rel_url]]]
if <ast.UnaryOp object at 0x7da18eb55fc0> begin[:]
return[constant[None]]
<ast.Tuple object at 0x7da18eb55240> assign[=] call[call[name[m].groups, parameter[]]][<ast.Slice object at 0x7da18ede4c10>]
<ast.Try object at 0x7da18ede4310> | keyword[def] identifier[fix_post_relative_url] ( identifier[rel_url] ):
literal[string]
identifier[m] = identifier[re] . identifier[match] (
literal[string]
literal[string]
literal[string] ,
identifier[rel_url]
)
keyword[if] keyword[not] identifier[m] :
keyword[return] keyword[None]
identifier[year] , identifier[month] , identifier[day] , identifier[post_name] = identifier[m] . identifier[groups] ()[: literal[int] ]
keyword[try] :
identifier[d] = identifier[date] ( identifier[year] = identifier[int] ( identifier[year] ), identifier[month] = identifier[int] ( identifier[month] ), identifier[day] = identifier[int] ( identifier[day] ))
keyword[return] literal[string] . identifier[join] (( identifier[d] . identifier[strftime] ( literal[string] ), identifier[post_name] ,
literal[string] keyword[if] identifier[m] . identifier[group] ( literal[string] ) keyword[else] literal[string] ))
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[return] keyword[None] | def fix_post_relative_url(rel_url):
"""
Fix post relative url to a standard, uniform format.
Possible input:
- 2016/7/8/my-post
- 2016/07/08/my-post.html
- 2016/8/09/my-post/
- 2016/8/09/my-post/index
- 2016/8/09/my-post/index.htm
- 2016/8/09/my-post/index.html
:param rel_url: relative url to fix
:return: fixed relative url, or None if cannot recognize
"""
m = re.match('^(?P<year>\\d{4})/(?P<month>\\d{1,2})/(?P<day>\\d{1,2})/(?P<post_name>[^/]+?)(?:(?:\\.html)|(?:/(?P<index>index(?:\\.html?)?)?))?$', rel_url)
if not m:
return None # depends on [control=['if'], data=[]]
(year, month, day, post_name) = m.groups()[:4]
try:
d = date(year=int(year), month=int(month), day=int(day))
return '/'.join((d.strftime('%Y/%m/%d'), post_name, 'index.html' if m.group('index') else '')) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
# the date is invalid
return None # depends on [control=['except'], data=[]] |
def plot_fit_individuals_lens_plane_only(
fit, should_plot_mask=True, extract_array_from_mask=False, zoom_around_mask=False, positions=None,
should_plot_image_plane_pix=False,
should_plot_image=False,
should_plot_noise_map=False,
should_plot_signal_to_noise_map=False,
should_plot_model_image=False,
should_plot_residual_map=False,
should_plot_chi_squared_map=False,
units='arcsec',
output_path=None, output_format='show'):
"""Plot the model datas_ of an analysis, using the *Fitter* class object.
The visualization and output type can be fully customized.
Parameters
-----------
fit : autolens.lens.fitting.Fitter
Class containing fit between the model datas_ and observed lens datas_ (including residual_map, chi_squared_map etc.)
output_path : str
The path where the datas_ is output if the output_type is a file format (e.g. png, fits)
output_format : str
How the datas_ is output. File formats (e.g. png, fits) output the datas_ to harddisk. 'show' displays the datas_ \
in the python interpreter window.
"""
mask = lens_plotter_util.get_mask(fit=fit, should_plot_mask=should_plot_mask)
kpc_per_arcsec = fit.tracer.image_plane.kpc_per_arcsec
if should_plot_image:
image_plane_pix_grid = lens_plotter_util.get_image_plane_pix_grid(should_plot_image_plane_pix, fit)
lens_plotter_util.plot_image(
fit=fit, mask=mask, extract_array_from_mask=extract_array_from_mask, zoom_around_mask=zoom_around_mask,
image_plane_pix_grid=image_plane_pix_grid,
units=units, kpc_per_arcsec=kpc_per_arcsec,
output_path=output_path, output_format=output_format)
if should_plot_noise_map:
lens_plotter_util.plot_noise_map(
fit=fit, mask=mask, extract_array_from_mask=extract_array_from_mask, zoom_around_mask=zoom_around_mask,
units=units, kpc_per_arcsec=kpc_per_arcsec,
output_path=output_path, output_format=output_format)
if should_plot_signal_to_noise_map:
lens_plotter_util.plot_signal_to_noise_map(
fit=fit, mask=mask, extract_array_from_mask=extract_array_from_mask, zoom_around_mask=zoom_around_mask,
units=units, kpc_per_arcsec=kpc_per_arcsec,
output_path=output_path, output_format=output_format)
if should_plot_model_image:
lens_plotter_util.plot_model_data(
fit=fit, mask=mask, extract_array_from_mask=extract_array_from_mask, zoom_around_mask=zoom_around_mask, positions=positions,
units=units, kpc_per_arcsec=kpc_per_arcsec,
output_path=output_path, output_format=output_format)
if should_plot_residual_map:
lens_plotter_util.plot_residual_map(
fit=fit, mask=mask, extract_array_from_mask=extract_array_from_mask, zoom_around_mask=zoom_around_mask,
units=units, kpc_per_arcsec=kpc_per_arcsec,
output_path=output_path, output_format=output_format)
if should_plot_chi_squared_map:
lens_plotter_util.plot_chi_squared_map(
fit=fit, mask=mask, extract_array_from_mask=extract_array_from_mask, zoom_around_mask=zoom_around_mask,
units=units, kpc_per_arcsec=kpc_per_arcsec,
output_path=output_path, output_format=output_format) | def function[plot_fit_individuals_lens_plane_only, parameter[fit, should_plot_mask, extract_array_from_mask, zoom_around_mask, positions, should_plot_image_plane_pix, should_plot_image, should_plot_noise_map, should_plot_signal_to_noise_map, should_plot_model_image, should_plot_residual_map, should_plot_chi_squared_map, units, output_path, output_format]]:
constant[Plot the model datas_ of an analysis, using the *Fitter* class object.
The visualization and output type can be fully customized.
Parameters
-----------
fit : autolens.lens.fitting.Fitter
Class containing fit between the model datas_ and observed lens datas_ (including residual_map, chi_squared_map etc.)
output_path : str
The path where the datas_ is output if the output_type is a file format (e.g. png, fits)
output_format : str
How the datas_ is output. File formats (e.g. png, fits) output the datas_ to harddisk. 'show' displays the datas_ in the python interpreter window.
]
variable[mask] assign[=] call[name[lens_plotter_util].get_mask, parameter[]]
variable[kpc_per_arcsec] assign[=] name[fit].tracer.image_plane.kpc_per_arcsec
if name[should_plot_image] begin[:]
variable[image_plane_pix_grid] assign[=] call[name[lens_plotter_util].get_image_plane_pix_grid, parameter[name[should_plot_image_plane_pix], name[fit]]]
call[name[lens_plotter_util].plot_image, parameter[]]
if name[should_plot_noise_map] begin[:]
call[name[lens_plotter_util].plot_noise_map, parameter[]]
if name[should_plot_signal_to_noise_map] begin[:]
call[name[lens_plotter_util].plot_signal_to_noise_map, parameter[]]
if name[should_plot_model_image] begin[:]
call[name[lens_plotter_util].plot_model_data, parameter[]]
if name[should_plot_residual_map] begin[:]
call[name[lens_plotter_util].plot_residual_map, parameter[]]
if name[should_plot_chi_squared_map] begin[:]
call[name[lens_plotter_util].plot_chi_squared_map, parameter[]] | keyword[def] identifier[plot_fit_individuals_lens_plane_only] (
identifier[fit] , identifier[should_plot_mask] = keyword[True] , identifier[extract_array_from_mask] = keyword[False] , identifier[zoom_around_mask] = keyword[False] , identifier[positions] = keyword[None] ,
identifier[should_plot_image_plane_pix] = keyword[False] ,
identifier[should_plot_image] = keyword[False] ,
identifier[should_plot_noise_map] = keyword[False] ,
identifier[should_plot_signal_to_noise_map] = keyword[False] ,
identifier[should_plot_model_image] = keyword[False] ,
identifier[should_plot_residual_map] = keyword[False] ,
identifier[should_plot_chi_squared_map] = keyword[False] ,
identifier[units] = literal[string] ,
identifier[output_path] = keyword[None] , identifier[output_format] = literal[string] ):
literal[string]
identifier[mask] = identifier[lens_plotter_util] . identifier[get_mask] ( identifier[fit] = identifier[fit] , identifier[should_plot_mask] = identifier[should_plot_mask] )
identifier[kpc_per_arcsec] = identifier[fit] . identifier[tracer] . identifier[image_plane] . identifier[kpc_per_arcsec]
keyword[if] identifier[should_plot_image] :
identifier[image_plane_pix_grid] = identifier[lens_plotter_util] . identifier[get_image_plane_pix_grid] ( identifier[should_plot_image_plane_pix] , identifier[fit] )
identifier[lens_plotter_util] . identifier[plot_image] (
identifier[fit] = identifier[fit] , identifier[mask] = identifier[mask] , identifier[extract_array_from_mask] = identifier[extract_array_from_mask] , identifier[zoom_around_mask] = identifier[zoom_around_mask] ,
identifier[image_plane_pix_grid] = identifier[image_plane_pix_grid] ,
identifier[units] = identifier[units] , identifier[kpc_per_arcsec] = identifier[kpc_per_arcsec] ,
identifier[output_path] = identifier[output_path] , identifier[output_format] = identifier[output_format] )
keyword[if] identifier[should_plot_noise_map] :
identifier[lens_plotter_util] . identifier[plot_noise_map] (
identifier[fit] = identifier[fit] , identifier[mask] = identifier[mask] , identifier[extract_array_from_mask] = identifier[extract_array_from_mask] , identifier[zoom_around_mask] = identifier[zoom_around_mask] ,
identifier[units] = identifier[units] , identifier[kpc_per_arcsec] = identifier[kpc_per_arcsec] ,
identifier[output_path] = identifier[output_path] , identifier[output_format] = identifier[output_format] )
keyword[if] identifier[should_plot_signal_to_noise_map] :
identifier[lens_plotter_util] . identifier[plot_signal_to_noise_map] (
identifier[fit] = identifier[fit] , identifier[mask] = identifier[mask] , identifier[extract_array_from_mask] = identifier[extract_array_from_mask] , identifier[zoom_around_mask] = identifier[zoom_around_mask] ,
identifier[units] = identifier[units] , identifier[kpc_per_arcsec] = identifier[kpc_per_arcsec] ,
identifier[output_path] = identifier[output_path] , identifier[output_format] = identifier[output_format] )
keyword[if] identifier[should_plot_model_image] :
identifier[lens_plotter_util] . identifier[plot_model_data] (
identifier[fit] = identifier[fit] , identifier[mask] = identifier[mask] , identifier[extract_array_from_mask] = identifier[extract_array_from_mask] , identifier[zoom_around_mask] = identifier[zoom_around_mask] , identifier[positions] = identifier[positions] ,
identifier[units] = identifier[units] , identifier[kpc_per_arcsec] = identifier[kpc_per_arcsec] ,
identifier[output_path] = identifier[output_path] , identifier[output_format] = identifier[output_format] )
keyword[if] identifier[should_plot_residual_map] :
identifier[lens_plotter_util] . identifier[plot_residual_map] (
identifier[fit] = identifier[fit] , identifier[mask] = identifier[mask] , identifier[extract_array_from_mask] = identifier[extract_array_from_mask] , identifier[zoom_around_mask] = identifier[zoom_around_mask] ,
identifier[units] = identifier[units] , identifier[kpc_per_arcsec] = identifier[kpc_per_arcsec] ,
identifier[output_path] = identifier[output_path] , identifier[output_format] = identifier[output_format] )
keyword[if] identifier[should_plot_chi_squared_map] :
identifier[lens_plotter_util] . identifier[plot_chi_squared_map] (
identifier[fit] = identifier[fit] , identifier[mask] = identifier[mask] , identifier[extract_array_from_mask] = identifier[extract_array_from_mask] , identifier[zoom_around_mask] = identifier[zoom_around_mask] ,
identifier[units] = identifier[units] , identifier[kpc_per_arcsec] = identifier[kpc_per_arcsec] ,
identifier[output_path] = identifier[output_path] , identifier[output_format] = identifier[output_format] ) | def plot_fit_individuals_lens_plane_only(fit, should_plot_mask=True, extract_array_from_mask=False, zoom_around_mask=False, positions=None, should_plot_image_plane_pix=False, should_plot_image=False, should_plot_noise_map=False, should_plot_signal_to_noise_map=False, should_plot_model_image=False, should_plot_residual_map=False, should_plot_chi_squared_map=False, units='arcsec', output_path=None, output_format='show'):
"""Plot the model datas_ of an analysis, using the *Fitter* class object.
The visualization and output type can be fully customized.
Parameters
-----------
fit : autolens.lens.fitting.Fitter
Class containing fit between the model datas_ and observed lens datas_ (including residual_map, chi_squared_map etc.)
output_path : str
The path where the datas_ is output if the output_type is a file format (e.g. png, fits)
output_format : str
How the datas_ is output. File formats (e.g. png, fits) output the datas_ to harddisk. 'show' displays the datas_ in the python interpreter window.
"""
mask = lens_plotter_util.get_mask(fit=fit, should_plot_mask=should_plot_mask)
kpc_per_arcsec = fit.tracer.image_plane.kpc_per_arcsec
if should_plot_image:
image_plane_pix_grid = lens_plotter_util.get_image_plane_pix_grid(should_plot_image_plane_pix, fit)
lens_plotter_util.plot_image(fit=fit, mask=mask, extract_array_from_mask=extract_array_from_mask, zoom_around_mask=zoom_around_mask, image_plane_pix_grid=image_plane_pix_grid, units=units, kpc_per_arcsec=kpc_per_arcsec, output_path=output_path, output_format=output_format) # depends on [control=['if'], data=[]]
if should_plot_noise_map:
lens_plotter_util.plot_noise_map(fit=fit, mask=mask, extract_array_from_mask=extract_array_from_mask, zoom_around_mask=zoom_around_mask, units=units, kpc_per_arcsec=kpc_per_arcsec, output_path=output_path, output_format=output_format) # depends on [control=['if'], data=[]]
if should_plot_signal_to_noise_map:
lens_plotter_util.plot_signal_to_noise_map(fit=fit, mask=mask, extract_array_from_mask=extract_array_from_mask, zoom_around_mask=zoom_around_mask, units=units, kpc_per_arcsec=kpc_per_arcsec, output_path=output_path, output_format=output_format) # depends on [control=['if'], data=[]]
if should_plot_model_image:
lens_plotter_util.plot_model_data(fit=fit, mask=mask, extract_array_from_mask=extract_array_from_mask, zoom_around_mask=zoom_around_mask, positions=positions, units=units, kpc_per_arcsec=kpc_per_arcsec, output_path=output_path, output_format=output_format) # depends on [control=['if'], data=[]]
if should_plot_residual_map:
lens_plotter_util.plot_residual_map(fit=fit, mask=mask, extract_array_from_mask=extract_array_from_mask, zoom_around_mask=zoom_around_mask, units=units, kpc_per_arcsec=kpc_per_arcsec, output_path=output_path, output_format=output_format) # depends on [control=['if'], data=[]]
if should_plot_chi_squared_map:
lens_plotter_util.plot_chi_squared_map(fit=fit, mask=mask, extract_array_from_mask=extract_array_from_mask, zoom_around_mask=zoom_around_mask, units=units, kpc_per_arcsec=kpc_per_arcsec, output_path=output_path, output_format=output_format) # depends on [control=['if'], data=[]] |
def register_url(url, handler, name=None, kwargs=None):
"""insert url into tornado application handlers group
:arg str url: url
:handler object handler: url mapping handler
:name reverse url name
:kwargs dict tornado handler initlize args
"""
if name is None and kwargs is None:
app_config.urls.append((url, handler))
return
if name is None:
app_config.urls.append((url, handler, kwargs))
return
app_config.urls.append((url, handler, kwargs, name)) | def function[register_url, parameter[url, handler, name, kwargs]]:
constant[insert url into tornado application handlers group
:arg str url: url
:handler object handler: url mapping handler
:name reverse url name
:kwargs dict tornado handler initlize args
]
if <ast.BoolOp object at 0x7da1b04eec80> begin[:]
call[name[app_config].urls.append, parameter[tuple[[<ast.Name object at 0x7da1b04161a0>, <ast.Name object at 0x7da1b04172b0>]]]]
return[None]
if compare[name[name] is constant[None]] begin[:]
call[name[app_config].urls.append, parameter[tuple[[<ast.Name object at 0x7da1b05b5cc0>, <ast.Name object at 0x7da1b05b45e0>, <ast.Name object at 0x7da1b05b5600>]]]]
return[None]
call[name[app_config].urls.append, parameter[tuple[[<ast.Name object at 0x7da1b05b6c80>, <ast.Name object at 0x7da1b05b42b0>, <ast.Name object at 0x7da1b05b6020>, <ast.Name object at 0x7da1b05b7760>]]]] | keyword[def] identifier[register_url] ( identifier[url] , identifier[handler] , identifier[name] = keyword[None] , identifier[kwargs] = keyword[None] ):
literal[string]
keyword[if] identifier[name] keyword[is] keyword[None] keyword[and] identifier[kwargs] keyword[is] keyword[None] :
identifier[app_config] . identifier[urls] . identifier[append] (( identifier[url] , identifier[handler] ))
keyword[return]
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[app_config] . identifier[urls] . identifier[append] (( identifier[url] , identifier[handler] , identifier[kwargs] ))
keyword[return]
identifier[app_config] . identifier[urls] . identifier[append] (( identifier[url] , identifier[handler] , identifier[kwargs] , identifier[name] )) | def register_url(url, handler, name=None, kwargs=None):
"""insert url into tornado application handlers group
:arg str url: url
:handler object handler: url mapping handler
:name reverse url name
:kwargs dict tornado handler initlize args
"""
if name is None and kwargs is None:
app_config.urls.append((url, handler))
return # depends on [control=['if'], data=[]]
if name is None:
app_config.urls.append((url, handler, kwargs))
return # depends on [control=['if'], data=[]]
app_config.urls.append((url, handler, kwargs, name)) |
def process_raw(self, raw: dict) -> None:
"""Pre-process raw dict.
Prepare parameters to work with APIItems.
"""
raw_ports = {}
for param in raw:
port_index = REGEX_PORT_INDEX.search(param).group(0)
if port_index not in raw_ports:
raw_ports[port_index] = {}
name = param.replace(IOPORT + '.I' + port_index + '.', '')
raw_ports[port_index][name] = raw[param]
super().process_raw(raw_ports) | def function[process_raw, parameter[self, raw]]:
constant[Pre-process raw dict.
Prepare parameters to work with APIItems.
]
variable[raw_ports] assign[=] dictionary[[], []]
for taget[name[param]] in starred[name[raw]] begin[:]
variable[port_index] assign[=] call[call[name[REGEX_PORT_INDEX].search, parameter[name[param]]].group, parameter[constant[0]]]
if compare[name[port_index] <ast.NotIn object at 0x7da2590d7190> name[raw_ports]] begin[:]
call[name[raw_ports]][name[port_index]] assign[=] dictionary[[], []]
variable[name] assign[=] call[name[param].replace, parameter[binary_operation[binary_operation[binary_operation[name[IOPORT] + constant[.I]] + name[port_index]] + constant[.]], constant[]]]
call[call[name[raw_ports]][name[port_index]]][name[name]] assign[=] call[name[raw]][name[param]]
call[call[name[super], parameter[]].process_raw, parameter[name[raw_ports]]] | keyword[def] identifier[process_raw] ( identifier[self] , identifier[raw] : identifier[dict] )-> keyword[None] :
literal[string]
identifier[raw_ports] ={}
keyword[for] identifier[param] keyword[in] identifier[raw] :
identifier[port_index] = identifier[REGEX_PORT_INDEX] . identifier[search] ( identifier[param] ). identifier[group] ( literal[int] )
keyword[if] identifier[port_index] keyword[not] keyword[in] identifier[raw_ports] :
identifier[raw_ports] [ identifier[port_index] ]={}
identifier[name] = identifier[param] . identifier[replace] ( identifier[IOPORT] + literal[string] + identifier[port_index] + literal[string] , literal[string] )
identifier[raw_ports] [ identifier[port_index] ][ identifier[name] ]= identifier[raw] [ identifier[param] ]
identifier[super] (). identifier[process_raw] ( identifier[raw_ports] ) | def process_raw(self, raw: dict) -> None:
"""Pre-process raw dict.
Prepare parameters to work with APIItems.
"""
raw_ports = {}
for param in raw:
port_index = REGEX_PORT_INDEX.search(param).group(0)
if port_index not in raw_ports:
raw_ports[port_index] = {} # depends on [control=['if'], data=['port_index', 'raw_ports']]
name = param.replace(IOPORT + '.I' + port_index + '.', '')
raw_ports[port_index][name] = raw[param] # depends on [control=['for'], data=['param']]
super().process_raw(raw_ports) |
def encrypt(self, plaintext, iv_bytes=None):
    """Encrypt ``plaintext`` with an Encrypt-then-MAC authenticated-encryption
    scheme, using the keys supplied in ``__init__``.

    Ciphertext expansion is deterministic: the output is always 42 bytes
    longer than the input ``plaintext`` (encrypted header plus truncated
    HMAC tag).  ``plaintext`` may be ``''``.

    Raises ``PlaintextTypeError`` if ``plaintext`` is not a string.
    """
    if not isinstance(plaintext, str):
        raise PlaintextTypeError("Input plaintext is not of type string")

    if iv_bytes is None:
        iv_bytes = fte.bit_ops.random_bytes(Encrypter._IV_LENGTH)

    # Derive two distinct IVs from the same random seed via a
    # domain-separation prefix byte.
    header_iv = '\x01' + iv_bytes
    ctr_iv = '\x02' + iv_bytes

    # W1: ECB-encrypted header carrying the IV and the plaintext length.
    header = header_iv + fte.bit_ops.long_to_bytes(
        len(plaintext), Encrypter._MSG_COUNTER_LENGTH)
    W1 = self._ecb_enc_K1.encrypt(header)

    # W2: the plaintext under AES-CTR keyed by K1, counter seeded from the
    # second IV.
    counter = Counter.new(
        AES.block_size * 8,
        initial_value=fte.bit_ops.bytes_to_long(ctr_iv))
    ctr_cipher = AES.new(key=self.K1,
                         mode=AES.MODE_CTR,
                         IV='\x00' * 8 + ctr_iv,
                         counter=counter)
    W2 = ctr_cipher.encrypt(plaintext)

    # T: HMAC-SHA512 over header + body, truncated (Encrypt-then-MAC).
    tag = HMAC.new(self.K2, W1 + W2, SHA512).digest()[:Encrypter._MAC_LENGTH]

    return W1 + W2 + tag
constant[Given ``plaintext``, returns a ``ciphertext`` encrypted with an authenticated-encryption scheme, using the keys specified in ``__init__``.
Ciphertext expansion is deterministic, the output ciphertext is always 42 bytes longer than the input ``plaintext``.
The input ``plaintext`` can be ``''``.
Raises ``PlaintextTypeError`` if input plaintext is not a string.
]
if <ast.UnaryOp object at 0x7da1b1af0220> begin[:]
<ast.Raise object at 0x7da1b1af19f0>
if compare[name[iv_bytes] is constant[None]] begin[:]
variable[iv_bytes] assign[=] call[name[fte].bit_ops.random_bytes, parameter[name[Encrypter]._IV_LENGTH]]
variable[iv1_bytes] assign[=] binary_operation[constant[] + name[iv_bytes]]
variable[iv2_bytes] assign[=] binary_operation[constant[] + name[iv_bytes]]
variable[W1] assign[=] name[iv1_bytes]
<ast.AugAssign object at 0x7da1b1af0370>
variable[W1] assign[=] call[name[self]._ecb_enc_K1.encrypt, parameter[name[W1]]]
variable[counter_length_in_bits] assign[=] binary_operation[name[AES].block_size * constant[8]]
variable[counter_val] assign[=] call[name[fte].bit_ops.bytes_to_long, parameter[name[iv2_bytes]]]
variable[counter] assign[=] call[name[Counter].new, parameter[name[counter_length_in_bits]]]
variable[ctr_enc] assign[=] call[name[AES].new, parameter[]]
variable[W2] assign[=] call[name[ctr_enc].encrypt, parameter[name[plaintext]]]
variable[mac] assign[=] call[name[HMAC].new, parameter[name[self].K2, binary_operation[name[W1] + name[W2]], name[SHA512]]]
variable[T] assign[=] call[name[mac].digest, parameter[]]
variable[T] assign[=] call[name[T]][<ast.Slice object at 0x7da20c7cb970>]
variable[ciphertext] assign[=] binary_operation[binary_operation[name[W1] + name[W2]] + name[T]]
return[name[ciphertext]] | keyword[def] identifier[encrypt] ( identifier[self] , identifier[plaintext] , identifier[iv_bytes] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[plaintext] , identifier[str] ):
keyword[raise] identifier[PlaintextTypeError] ( literal[string] )
keyword[if] identifier[iv_bytes] keyword[is] keyword[None] :
identifier[iv_bytes] = identifier[fte] . identifier[bit_ops] . identifier[random_bytes] ( identifier[Encrypter] . identifier[_IV_LENGTH] )
identifier[iv1_bytes] = literal[string] + identifier[iv_bytes]
identifier[iv2_bytes] = literal[string] + identifier[iv_bytes]
identifier[W1] = identifier[iv1_bytes]
identifier[W1] += identifier[fte] . identifier[bit_ops] . identifier[long_to_bytes] (
identifier[len] ( identifier[plaintext] ), identifier[Encrypter] . identifier[_MSG_COUNTER_LENGTH] )
identifier[W1] = identifier[self] . identifier[_ecb_enc_K1] . identifier[encrypt] ( identifier[W1] )
identifier[counter_length_in_bits] = identifier[AES] . identifier[block_size] * literal[int]
identifier[counter_val] = identifier[fte] . identifier[bit_ops] . identifier[bytes_to_long] ( identifier[iv2_bytes] )
identifier[counter] = identifier[Counter] . identifier[new] (
identifier[counter_length_in_bits] , identifier[initial_value] = identifier[counter_val] )
identifier[ctr_enc] = identifier[AES] . identifier[new] ( identifier[key] = identifier[self] . identifier[K1] ,
identifier[mode] = identifier[AES] . identifier[MODE_CTR] ,
identifier[IV] = literal[string] * literal[int] + identifier[iv2_bytes] ,
identifier[counter] = identifier[counter] )
identifier[W2] = identifier[ctr_enc] . identifier[encrypt] ( identifier[plaintext] )
identifier[mac] = identifier[HMAC] . identifier[new] ( identifier[self] . identifier[K2] , identifier[W1] + identifier[W2] , identifier[SHA512] )
identifier[T] = identifier[mac] . identifier[digest] ()
identifier[T] = identifier[T] [: identifier[Encrypter] . identifier[_MAC_LENGTH] ]
identifier[ciphertext] = identifier[W1] + identifier[W2] + identifier[T]
keyword[return] identifier[ciphertext] | def encrypt(self, plaintext, iv_bytes=None):
"""Given ``plaintext``, returns a ``ciphertext`` encrypted with an authenticated-encryption scheme, using the keys specified in ``__init__``.
Ciphertext expansion is deterministic, the output ciphertext is always 42 bytes longer than the input ``plaintext``.
The input ``plaintext`` can be ``''``.
Raises ``PlaintextTypeError`` if input plaintext is not a string.
"""
if not isinstance(plaintext, str):
raise PlaintextTypeError('Input plaintext is not of type string') # depends on [control=['if'], data=[]]
if iv_bytes is None:
iv_bytes = fte.bit_ops.random_bytes(Encrypter._IV_LENGTH) # depends on [control=['if'], data=['iv_bytes']]
iv1_bytes = '\x01' + iv_bytes
iv2_bytes = '\x02' + iv_bytes
W1 = iv1_bytes
W1 += fte.bit_ops.long_to_bytes(len(plaintext), Encrypter._MSG_COUNTER_LENGTH)
W1 = self._ecb_enc_K1.encrypt(W1)
counter_length_in_bits = AES.block_size * 8
counter_val = fte.bit_ops.bytes_to_long(iv2_bytes)
counter = Counter.new(counter_length_in_bits, initial_value=counter_val)
ctr_enc = AES.new(key=self.K1, mode=AES.MODE_CTR, IV='\x00' * 8 + iv2_bytes, counter=counter)
W2 = ctr_enc.encrypt(plaintext)
mac = HMAC.new(self.K2, W1 + W2, SHA512)
T = mac.digest()
T = T[:Encrypter._MAC_LENGTH]
ciphertext = W1 + W2 + T
return ciphertext |
def py_to_glsl(root):
    """Translate a Python AST into GLSL source code.

    root: an ast.FunctionDef object.

    Returns a list of strings, one string per line of generated GLSL.
    """
    translated = AstToGlsl().visit(root)
    return translated.lines
constant[Translate Python AST into GLSL code.
root: an ast.FunctionDef object
Return a list of strings, where each string is a line of GLSL
code.
]
variable[atg] assign[=] call[name[AstToGlsl], parameter[]]
variable[code] assign[=] call[name[atg].visit, parameter[name[root]]]
return[name[code].lines] | keyword[def] identifier[py_to_glsl] ( identifier[root] ):
literal[string]
identifier[atg] = identifier[AstToGlsl] ()
identifier[code] = identifier[atg] . identifier[visit] ( identifier[root] )
keyword[return] identifier[code] . identifier[lines] | def py_to_glsl(root):
"""Translate Python AST into GLSL code.
root: an ast.FunctionDef object
Return a list of strings, where each string is a line of GLSL
code.
"""
atg = AstToGlsl()
code = atg.visit(root)
return code.lines |
def nearest_subpackage(cls, package, all_packages):
    """Given a package, find its nearest parent in all_packages."""
    package_parts = package.split('.')

    def common_parts(candidate):
        # Collect the dotted components shared with `package`, stopping at
        # the first mismatch.
        prefix = []
        for own, other in zip(package_parts, candidate.split('.')):
            if own != other:
                break
            prefix.append(own)
        return prefix

    overlaps = [parts for parts in map(common_parts, all_packages) if parts]
    if not overlaps:
        # No candidate shares any prefix; fall back to the package itself.
        return package
    return '.'.join(max(overlaps, key=len))
constant[Given a package, find its nearest parent in all_packages.]
def function[shared_prefix, parameter[candidate]]:
variable[zipped] assign[=] call[name[zip], parameter[call[name[package].split, parameter[constant[.]]], call[name[candidate].split, parameter[constant[.]]]]]
variable[matching] assign[=] call[name[itertools].takewhile, parameter[<ast.Lambda object at 0x7da1b22a7580>, name[zipped]]]
return[<ast.ListComp object at 0x7da1b22a7400>]
variable[shared_packages] assign[=] <ast.ListComp object at 0x7da1b22a7e50>
return[<ast.IfExp object at 0x7da1b22b97b0>] | keyword[def] identifier[nearest_subpackage] ( identifier[cls] , identifier[package] , identifier[all_packages] ):
literal[string]
keyword[def] identifier[shared_prefix] ( identifier[candidate] ):
identifier[zipped] = identifier[zip] ( identifier[package] . identifier[split] ( literal[string] ), identifier[candidate] . identifier[split] ( literal[string] ))
identifier[matching] = identifier[itertools] . identifier[takewhile] ( keyword[lambda] identifier[pair] : identifier[pair] [ literal[int] ]== identifier[pair] [ literal[int] ], identifier[zipped] )
keyword[return] [ identifier[pair] [ literal[int] ] keyword[for] identifier[pair] keyword[in] identifier[matching] ]
identifier[shared_packages] =[ identifier[_f] keyword[for] identifier[_f] keyword[in] identifier[map] ( identifier[shared_prefix] , identifier[all_packages] ) keyword[if] identifier[_f] ]
keyword[return] literal[string] . identifier[join] ( identifier[max] ( identifier[shared_packages] , identifier[key] = identifier[len] )) keyword[if] identifier[shared_packages] keyword[else] identifier[package] | def nearest_subpackage(cls, package, all_packages):
"""Given a package, find its nearest parent in all_packages."""
def shared_prefix(candidate):
zipped = zip(package.split('.'), candidate.split('.'))
matching = itertools.takewhile(lambda pair: pair[0] == pair[1], zipped)
return [pair[0] for pair in matching]
shared_packages = [_f for _f in map(shared_prefix, all_packages) if _f]
return '.'.join(max(shared_packages, key=len)) if shared_packages else package |
def Extract(fileList, fileFormatList, archiveDir, skipUserInput):
  """
  Iterate through given file list and extract all files matching the file
  format list from each RAR file. After successful extraction move RAR files to
  archive directory.
  Parameters
  ----------
  fileList : list
    List of files to attempt to extract.
  fileFormatList : list
    List of file formats to extract from each RAR archive.
  archiveDir : string
    Directory to move RAR files once extract is complete.
  skipUserInput : boolean
    Set to skip any potential user input (if a single option is available
    it will be selected otherwise the user input will default to take no action).
  """
  goodlogging.Log.Info("EXTRACT", "Extracting files from compressed archives")
  goodlogging.Log.IncreaseIndent()
  # Nothing to do for an empty file list.
  if len(fileList) == 0:
    goodlogging.Log.Info("EXTRACT", "No files to extract")
    goodlogging.Log.DecreaseIndent()
    return None
  # Base names (without ".partN.rar") of extracted first parts, and later
  # parts skipped so far; used to archive all parts of a multipart set.
  firstPartExtractList = []
  otherPartSkippedList = []
  # Remember the last RAR password entered so it can optionally be reused
  # for subsequent archives without prompting again.
  lastPassword = False
  reuseLastPassword = 0
  for filePath in fileList:
    goodlogging.Log.Info("EXTRACT", "{0}".format(filePath))
    goodlogging.Log.IncreaseIndent()
    try:
      rarArchive = rarfile.RarFile(filePath)
    except ImportError:
      goodlogging.Log.Info("EXTRACT", "Unable to extract - Python needs the rarfile package to be installed (see README for more details)")
    except rarfile.NeedFirstVolume:
      # Not the first volume of a multipart archive - defer archiving this
      # part until the corresponding first part has been processed.
      goodlogging.Log.Info("EXTRACT", "File skipped - this is not the first part of the RAR archive")
      MultipartArchiving(firstPartExtractList, otherPartSkippedList, archiveDir, filePath)
    except BaseException as ex:
      goodlogging.Log.Info("EXTRACT", "Unable to extract - Exception: {0}".format(ex))
    else:
      dirPath = os.path.dirname(filePath)
      fileExtracted = False
      rarAuthentication = True
      if rarArchive.needs_password():
        # Offer to reuse the previously entered password before prompting
        # for a new one (reuseLastPassword states come from CheckPasswordReuse).
        if lastPassword and reuseLastPassword in (0, 1):
          reuseLastPassword = CheckPasswordReuse(skipUserInput)
        if lastPassword and reuseLastPassword in (1, 2):
          rarArchive.setpassword(lastPassword)
        else:
          rarPassword = GetRarPassword(skipUserInput)
          if rarPassword:
            rarArchive.setpassword(rarPassword)
            lastPassword = rarPassword
          else:
            # No password supplied - this archive cannot be opened.
            rarAuthentication = False
      if rarAuthentication:
        # Extract only members whose extension matches the requested formats.
        for f in rarArchive.infolist():
          if util.FileExtensionMatch(f.filename, fileFormatList):
            goodlogging.Log.Info("EXTRACT", "Extracting file: {0}".format(f.filename))
            # extractPath keeps any directory structure stored inside the RAR;
            # targetPath is the flattened destination next to the archive.
            extractPath = os.path.join(dirPath, f.filename)
            targetPath = os.path.join(dirPath, os.path.basename(f.filename))
            if os.path.isfile(targetPath):
              goodlogging.Log.Info("EXTRACT", "Extraction skipped - file already exists at target: {0}".format(targetPath))
              fileExtracted = True
            elif os.path.isfile(extractPath):
              goodlogging.Log.Info("EXTRACT", "Extraction skipped - file already exists at extract directory: {0}".format(extractPath))
              fileExtracted = True
            else:
              fileExtracted = DoRarExtraction(rarArchive, f, dirPath)
            # Flatten: move the extracted file up beside the archive and
            # clean out any now-empty directories it left behind.
            if os.path.isfile(extractPath) and not os.path.isfile(targetPath):
              os.rename(extractPath, targetPath)
              util.RemoveEmptyDirectoryTree(os.path.dirname(extractPath))
      if fileExtracted is True:
        # Archive the processed RAR; for multipart sets also archive any
        # related parts that were skipped earlier in this run.
        util.ArchiveProcessedFile(filePath, archiveDir)
        try:
          firstPartFileName = re.findall('(.+?)[.]part1[.]rar', filePath)[0]
        except IndexError:
          # Single-part archive - no related parts to track.
          pass
        else:
          firstPartExtractList.append(firstPartFileName)
          MultipartArchiving(firstPartExtractList, otherPartSkippedList, archiveDir)
    finally:
      goodlogging.Log.DecreaseIndent()
  goodlogging.Log.DecreaseIndent()
constant[
Iterate through given file list and extract all files matching the file
format list from each RAR file. After sucessful extraction move RAR files to
archive directory.
Parameters
----------
fileList : list
List of files to attempt to extract.
fileFormatList : list
List of file formats to extract from each RAR archive.
archiveDir : string
Directory to move RAR files once extract is complete.
skipUserInput : boolean
Set to skip any potential user input (if a single option is available
it will be selected otherwise the user input will default to take no action).
]
call[name[goodlogging].Log.Info, parameter[constant[EXTRACT], constant[Extracting files from compressed archives]]]
call[name[goodlogging].Log.IncreaseIndent, parameter[]]
if compare[call[name[len], parameter[name[fileList]]] equal[==] constant[0]] begin[:]
call[name[goodlogging].Log.Info, parameter[constant[EXTRACT], constant[No files to extract]]]
call[name[goodlogging].Log.DecreaseIndent, parameter[]]
return[constant[None]]
variable[firstPartExtractList] assign[=] list[[]]
variable[otherPartSkippedList] assign[=] list[[]]
variable[lastPassword] assign[=] constant[False]
variable[reuseLastPassword] assign[=] constant[0]
for taget[name[filePath]] in starred[name[fileList]] begin[:]
call[name[goodlogging].Log.Info, parameter[constant[EXTRACT], call[constant[{0}].format, parameter[name[filePath]]]]]
call[name[goodlogging].Log.IncreaseIndent, parameter[]]
<ast.Try object at 0x7da1b287b3a0>
call[name[goodlogging].Log.DecreaseIndent, parameter[]] | keyword[def] identifier[Extract] ( identifier[fileList] , identifier[fileFormatList] , identifier[archiveDir] , identifier[skipUserInput] ):
literal[string]
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] )
identifier[goodlogging] . identifier[Log] . identifier[IncreaseIndent] ()
keyword[if] identifier[len] ( identifier[fileList] )== literal[int] :
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] )
identifier[goodlogging] . identifier[Log] . identifier[DecreaseIndent] ()
keyword[return] keyword[None]
identifier[firstPartExtractList] =[]
identifier[otherPartSkippedList] =[]
identifier[lastPassword] = keyword[False]
identifier[reuseLastPassword] = literal[int]
keyword[for] identifier[filePath] keyword[in] identifier[fileList] :
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[filePath] ))
identifier[goodlogging] . identifier[Log] . identifier[IncreaseIndent] ()
keyword[try] :
identifier[rarArchive] = identifier[rarfile] . identifier[RarFile] ( identifier[filePath] )
keyword[except] identifier[ImportError] :
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] )
keyword[except] identifier[rarfile] . identifier[NeedFirstVolume] :
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] )
identifier[MultipartArchiving] ( identifier[firstPartExtractList] , identifier[otherPartSkippedList] , identifier[archiveDir] , identifier[filePath] )
keyword[except] identifier[BaseException] keyword[as] identifier[ex] :
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[ex] ))
keyword[else] :
identifier[dirPath] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[filePath] )
identifier[fileExtracted] = keyword[False]
identifier[rarAuthentication] = keyword[True]
keyword[if] identifier[rarArchive] . identifier[needs_password] ():
keyword[if] identifier[lastPassword] keyword[and] identifier[reuseLastPassword] keyword[in] ( literal[int] , literal[int] ):
identifier[reuseLastPassword] = identifier[CheckPasswordReuse] ( identifier[skipUserInput] )
keyword[if] identifier[lastPassword] keyword[and] identifier[reuseLastPassword] keyword[in] ( literal[int] , literal[int] ):
identifier[rarArchive] . identifier[setpassword] ( identifier[lastPassword] )
keyword[else] :
identifier[rarPassword] = identifier[GetRarPassword] ( identifier[skipUserInput] )
keyword[if] identifier[rarPassword] :
identifier[rarArchive] . identifier[setpassword] ( identifier[rarPassword] )
identifier[lastPassword] = identifier[rarPassword]
keyword[else] :
identifier[rarAuthentication] = keyword[False]
keyword[if] identifier[rarAuthentication] :
keyword[for] identifier[f] keyword[in] identifier[rarArchive] . identifier[infolist] ():
keyword[if] identifier[util] . identifier[FileExtensionMatch] ( identifier[f] . identifier[filename] , identifier[fileFormatList] ):
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[f] . identifier[filename] ))
identifier[extractPath] = identifier[os] . identifier[path] . identifier[join] ( identifier[dirPath] , identifier[f] . identifier[filename] )
identifier[targetPath] = identifier[os] . identifier[path] . identifier[join] ( identifier[dirPath] , identifier[os] . identifier[path] . identifier[basename] ( identifier[f] . identifier[filename] ))
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[targetPath] ):
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[targetPath] ))
identifier[fileExtracted] = keyword[True]
keyword[elif] identifier[os] . identifier[path] . identifier[isfile] ( identifier[extractPath] ):
identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[extractPath] ))
identifier[fileExtracted] = keyword[True]
keyword[else] :
identifier[fileExtracted] = identifier[DoRarExtraction] ( identifier[rarArchive] , identifier[f] , identifier[dirPath] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[extractPath] ) keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[targetPath] ):
identifier[os] . identifier[rename] ( identifier[extractPath] , identifier[targetPath] )
identifier[util] . identifier[RemoveEmptyDirectoryTree] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[extractPath] ))
keyword[if] identifier[fileExtracted] keyword[is] keyword[True] :
identifier[util] . identifier[ArchiveProcessedFile] ( identifier[filePath] , identifier[archiveDir] )
keyword[try] :
identifier[firstPartFileName] = identifier[re] . identifier[findall] ( literal[string] , identifier[filePath] )[ literal[int] ]
keyword[except] identifier[IndexError] :
keyword[pass]
keyword[else] :
identifier[firstPartExtractList] . identifier[append] ( identifier[firstPartFileName] )
identifier[MultipartArchiving] ( identifier[firstPartExtractList] , identifier[otherPartSkippedList] , identifier[archiveDir] )
keyword[finally] :
identifier[goodlogging] . identifier[Log] . identifier[DecreaseIndent] ()
identifier[goodlogging] . identifier[Log] . identifier[DecreaseIndent] () | def Extract(fileList, fileFormatList, archiveDir, skipUserInput):
"""
Iterate through given file list and extract all files matching the file
format list from each RAR file. After sucessful extraction move RAR files to
archive directory.
Parameters
----------
fileList : list
List of files to attempt to extract.
fileFormatList : list
List of file formats to extract from each RAR archive.
archiveDir : string
Directory to move RAR files once extract is complete.
skipUserInput : boolean
Set to skip any potential user input (if a single option is available
it will be selected otherwise the user input will default to take no action).
"""
goodlogging.Log.Info('EXTRACT', 'Extracting files from compressed archives')
goodlogging.Log.IncreaseIndent()
if len(fileList) == 0:
goodlogging.Log.Info('EXTRACT', 'No files to extract')
goodlogging.Log.DecreaseIndent()
return None # depends on [control=['if'], data=[]]
firstPartExtractList = []
otherPartSkippedList = []
lastPassword = False
reuseLastPassword = 0
for filePath in fileList:
goodlogging.Log.Info('EXTRACT', '{0}'.format(filePath))
goodlogging.Log.IncreaseIndent()
try:
rarArchive = rarfile.RarFile(filePath) # depends on [control=['try'], data=[]]
except ImportError:
goodlogging.Log.Info('EXTRACT', 'Unable to extract - Python needs the rarfile package to be installed (see README for more details)') # depends on [control=['except'], data=[]]
except rarfile.NeedFirstVolume:
goodlogging.Log.Info('EXTRACT', 'File skipped - this is not the first part of the RAR archive')
MultipartArchiving(firstPartExtractList, otherPartSkippedList, archiveDir, filePath) # depends on [control=['except'], data=[]]
except BaseException as ex:
goodlogging.Log.Info('EXTRACT', 'Unable to extract - Exception: {0}'.format(ex)) # depends on [control=['except'], data=['ex']]
else:
dirPath = os.path.dirname(filePath)
fileExtracted = False
rarAuthentication = True
if rarArchive.needs_password():
if lastPassword and reuseLastPassword in (0, 1):
reuseLastPassword = CheckPasswordReuse(skipUserInput) # depends on [control=['if'], data=[]]
if lastPassword and reuseLastPassword in (1, 2):
rarArchive.setpassword(lastPassword) # depends on [control=['if'], data=[]]
else:
rarPassword = GetRarPassword(skipUserInput)
if rarPassword:
rarArchive.setpassword(rarPassword)
lastPassword = rarPassword # depends on [control=['if'], data=[]]
else:
rarAuthentication = False # depends on [control=['if'], data=[]]
if rarAuthentication:
for f in rarArchive.infolist():
if util.FileExtensionMatch(f.filename, fileFormatList):
goodlogging.Log.Info('EXTRACT', 'Extracting file: {0}'.format(f.filename))
extractPath = os.path.join(dirPath, f.filename)
targetPath = os.path.join(dirPath, os.path.basename(f.filename))
if os.path.isfile(targetPath):
goodlogging.Log.Info('EXTRACT', 'Extraction skipped - file already exists at target: {0}'.format(targetPath))
fileExtracted = True # depends on [control=['if'], data=[]]
elif os.path.isfile(extractPath):
goodlogging.Log.Info('EXTRACT', 'Extraction skipped - file already exists at extract directory: {0}'.format(extractPath))
fileExtracted = True # depends on [control=['if'], data=[]]
else:
fileExtracted = DoRarExtraction(rarArchive, f, dirPath)
if os.path.isfile(extractPath) and (not os.path.isfile(targetPath)):
os.rename(extractPath, targetPath)
util.RemoveEmptyDirectoryTree(os.path.dirname(extractPath)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] # depends on [control=['if'], data=[]]
if fileExtracted is True:
util.ArchiveProcessedFile(filePath, archiveDir)
try:
firstPartFileName = re.findall('(.+?)[.]part1[.]rar', filePath)[0] # depends on [control=['try'], data=[]]
except IndexError:
pass # depends on [control=['except'], data=[]]
else:
firstPartExtractList.append(firstPartFileName)
MultipartArchiving(firstPartExtractList, otherPartSkippedList, archiveDir) # depends on [control=['if'], data=[]]
finally:
goodlogging.Log.DecreaseIndent() # depends on [control=['for'], data=['filePath']]
goodlogging.Log.DecreaseIndent() |
def assign_user_policies(user, *policies_roles):
    """Attach a sequence of policies to ``user``.

    If ``user`` is ``None``, the policies are assigned to the anonymous
    user instead.  (Also installed as the ``assign_policies`` method on
    the ``User`` model.)
    """
    clear_user_policies(user)
    permission_set = PermissionSet.objects.by_policies_and_roles(policies_roles)
    permission_set.refresh()
    if user is None:
        permission_set.anonymous_user = True
    else:
        permission_set.users.add(user)
    permission_set.save()
    # Invalidate any cached permission data for this user.
    cache.set(user_cache_key(user), None)
constant[Assign a sequence of policies to a user (or the anonymous user is
``user`` is ``None``). (Also installed as ``assign_policies``
method on ``User`` model.
]
call[name[clear_user_policies], parameter[name[user]]]
variable[pset] assign[=] call[name[PermissionSet].objects.by_policies_and_roles, parameter[name[policies_roles]]]
call[name[pset].refresh, parameter[]]
if compare[name[user] is constant[None]] begin[:]
name[pset].anonymous_user assign[=] constant[True]
call[name[pset].save, parameter[]]
call[name[cache].set, parameter[call[name[user_cache_key], parameter[name[user]]], constant[None]]] | keyword[def] identifier[assign_user_policies] ( identifier[user] ,* identifier[policies_roles] ):
literal[string]
identifier[clear_user_policies] ( identifier[user] )
identifier[pset] = identifier[PermissionSet] . identifier[objects] . identifier[by_policies_and_roles] ( identifier[policies_roles] )
identifier[pset] . identifier[refresh] ()
keyword[if] identifier[user] keyword[is] keyword[None] :
identifier[pset] . identifier[anonymous_user] = keyword[True]
keyword[else] :
identifier[pset] . identifier[users] . identifier[add] ( identifier[user] )
identifier[pset] . identifier[save] ()
identifier[cache] . identifier[set] ( identifier[user_cache_key] ( identifier[user] ), keyword[None] ) | def assign_user_policies(user, *policies_roles):
"""Assign a sequence of policies to a user (or the anonymous user is
``user`` is ``None``). (Also installed as ``assign_policies``
method on ``User`` model.
"""
clear_user_policies(user)
pset = PermissionSet.objects.by_policies_and_roles(policies_roles)
pset.refresh()
if user is None:
pset.anonymous_user = True # depends on [control=['if'], data=[]]
else:
pset.users.add(user)
pset.save()
cache.set(user_cache_key(user), None) |
def kill(self) -> None:
    """Kill ffmpeg job."""
    # Terminate the subprocess immediately, then reap it off the event
    # loop: communicate() blocks, so it runs in the default executor to
    # avoid stalling the loop while the dead process is collected.
    self._proc.kill()
    self._loop.run_in_executor(None, self._proc.communicate)
constant[Kill ffmpeg job.]
call[name[self]._proc.kill, parameter[]]
call[name[self]._loop.run_in_executor, parameter[constant[None], name[self]._proc.communicate]] | keyword[def] identifier[kill] ( identifier[self] )-> keyword[None] :
literal[string]
identifier[self] . identifier[_proc] . identifier[kill] ()
identifier[self] . identifier[_loop] . identifier[run_in_executor] ( keyword[None] , identifier[self] . identifier[_proc] . identifier[communicate] ) | def kill(self) -> None:
"""Kill ffmpeg job."""
self._proc.kill()
self._loop.run_in_executor(None, self._proc.communicate) |
def closestConnectedDistance(target, walls=None,
                             max_len_border_line=500,
                             max_n_path=100,
                             concentrate_every_n_pixel=1):
    '''
    returns an array with contains the closest distance from every pixel
    the next position where target == 1
    [walls] binary 2darray - e.g. walls in a labyrinth that have to be surrounded in order to get to the target
    [target] binary 2darray - positions given by 1
    [concentrate_every_n_pixel] often the distance of neighbour pixels is similar
    to speed up calculation set this value to e.g. 3 to calculate only
    the distance for every 3. pixel and interpolate in between
    recommended are values up to 3-5
    [max_len_border_line]
    this function calculates distances travelled using region growth
    e.g.
    0123
    1123
    2223
    3333
    the last steps (e.g. for all steps 3 border_line=7) are stored in an array of limited
    length defined in 'max_len_border_line'
    [max_n_path]
    how many paths are possible between every pixel and the target
    only needed if fast==False
    '''
    c = concentrate_every_n_pixel
    assert c >= 1
    # No walls given: the whole grid is traversable.
    if walls is None:
        walls = np.zeros_like(target, dtype=bool)
    s = target.shape
    # Pick the smallest unsigned dtype that can hold the distances; for
    # small grids (< 200 px per side) uint8 is sufficient.
    dt = np.uint16
    if max(target.shape) < 200:
        dt = np.uint8
    # Output is downsampled by the concentration factor c.
    out = np.zeros((s[0] // c, s[1] // c), dtype=dt)
    # temporary arrays:
    # `growth` holds the region-growth state; `steps`/`new_steps` are the
    # bounded front-line buffers described in the docstring.
    # NOTE(review): `max_n_path` is documented but not used here -
    # presumably only relevant for a non-fast code path; confirm.
    growth = np.zeros_like(target, dtype=dt)
    res = np.empty(shape=3, dtype=dt)
    steps = np.empty(shape=(max_len_border_line, 2), dtype=dt)
    new_steps = np.empty(shape=(max_len_border_line, 2), dtype=dt)
    # run calculation:
    _calc(growth, out, walls, target, steps, new_steps,
          res, concentrate_every_n_pixel)
    if c > 1:
        # if concentrate_every_n_pixel > 1
        # the resized output array
        # will have wrong values close to the wall
        # therefore substitute all wall value (-1)
        # with an average of their closest neighbours
        interpolate2dStructuredIDW(out, out == 0)
        out = cv2.resize(out, s[::-1])
    # Wall pixels carry no meaningful distance; zero them out.
    out[walls] = 0
    return out
constant[
returns an array with contains the closest distance from every pixel
the next position where target == 1
[walls] binary 2darray - e.g. walls in a labyrinth that have to be surrounded in order to get to the target
[target] binary 2darray - positions given by 1
[concentrate_every_n_pixel] often the distance of neighbour pixels is similar
to speed up calculation set this value to e.g. 3 to calculate only
the distance for every 3. pixel and interpolate in between
recommended are values up to 3-5
[max_len_border_line]
this function calculates distances travelled using region growth
e.g.
0123
1123
2223
3333
the last steps (e.g. for all steps 3 border_line=7) are stored in an array of limited
length defined in 'max_len_border_line'
[max_n_path]
how many paths are possible between every pixel and the target
only needed if fast==False
]
variable[c] assign[=] name[concentrate_every_n_pixel]
assert[compare[name[c] greater_or_equal[>=] constant[1]]]
if compare[name[walls] is constant[None]] begin[:]
variable[walls] assign[=] call[name[np].zeros_like, parameter[name[target]]]
variable[s] assign[=] name[target].shape
variable[dt] assign[=] name[np].uint16
if compare[call[name[max], parameter[name[target].shape]] less[<] constant[200]] begin[:]
variable[dt] assign[=] name[np].uint8
variable[out] assign[=] call[name[np].zeros, parameter[tuple[[<ast.BinOp object at 0x7da1b2346200>, <ast.BinOp object at 0x7da1b23459c0>]]]]
variable[growth] assign[=] call[name[np].zeros_like, parameter[name[target]]]
variable[res] assign[=] call[name[np].empty, parameter[]]
variable[steps] assign[=] call[name[np].empty, parameter[]]
variable[new_steps] assign[=] call[name[np].empty, parameter[]]
call[name[_calc], parameter[name[growth], name[out], name[walls], name[target], name[steps], name[new_steps], name[res], name[concentrate_every_n_pixel]]]
if compare[name[c] greater[>] constant[1]] begin[:]
call[name[interpolate2dStructuredIDW], parameter[name[out], compare[name[out] equal[==] constant[0]]]]
variable[out] assign[=] call[name[cv2].resize, parameter[name[out], call[name[s]][<ast.Slice object at 0x7da18fe916f0>]]]
call[name[out]][name[walls]] assign[=] constant[0]
return[name[out]] | keyword[def] identifier[closestConnectedDistance] ( identifier[target] , identifier[walls] = keyword[None] ,
identifier[max_len_border_line] = literal[int] ,
identifier[max_n_path] = literal[int] ,
identifier[concentrate_every_n_pixel] = literal[int] ):
literal[string]
identifier[c] = identifier[concentrate_every_n_pixel]
keyword[assert] identifier[c] >= literal[int]
keyword[if] identifier[walls] keyword[is] keyword[None] :
identifier[walls] = identifier[np] . identifier[zeros_like] ( identifier[target] , identifier[dtype] = identifier[bool] )
identifier[s] = identifier[target] . identifier[shape]
identifier[dt] = identifier[np] . identifier[uint16]
keyword[if] identifier[max] ( identifier[target] . identifier[shape] )< literal[int] :
identifier[dt] = identifier[np] . identifier[uint8]
identifier[out] = identifier[np] . identifier[zeros] (( identifier[s] [ literal[int] ]// identifier[c] , identifier[s] [ literal[int] ]// identifier[c] ), identifier[dtype] = identifier[dt] )
identifier[growth] = identifier[np] . identifier[zeros_like] ( identifier[target] , identifier[dtype] = identifier[dt] )
identifier[res] = identifier[np] . identifier[empty] ( identifier[shape] = literal[int] , identifier[dtype] = identifier[dt] )
identifier[steps] = identifier[np] . identifier[empty] ( identifier[shape] =( identifier[max_len_border_line] , literal[int] ), identifier[dtype] = identifier[dt] )
identifier[new_steps] = identifier[np] . identifier[empty] ( identifier[shape] =( identifier[max_len_border_line] , literal[int] ), identifier[dtype] = identifier[dt] )
identifier[_calc] ( identifier[growth] , identifier[out] , identifier[walls] , identifier[target] , identifier[steps] , identifier[new_steps] ,
identifier[res] , identifier[concentrate_every_n_pixel] )
keyword[if] identifier[c] > literal[int] :
identifier[interpolate2dStructuredIDW] ( identifier[out] , identifier[out] == literal[int] )
identifier[out] = identifier[cv2] . identifier[resize] ( identifier[out] , identifier[s] [::- literal[int] ])
identifier[out] [ identifier[walls] ]= literal[int]
keyword[return] identifier[out] | def closestConnectedDistance(target, walls=None, max_len_border_line=500, max_n_path=100, concentrate_every_n_pixel=1):
"""
returns an array with contains the closest distance from every pixel
the next position where target == 1
[walls] binary 2darray - e.g. walls in a labyrinth that have to be surrounded in order to get to the target
[target] binary 2darray - positions given by 1
[concentrate_every_n_pixel] often the distance of neighbour pixels is similar
to speed up calculation set this value to e.g. 3 to calculate only
the distance for every 3. pixel and interpolate in between
recommended are values up to 3-5
[max_len_border_line]
this function calculates distances travelled using region growth
e.g.
0123
1123
2223
3333
the last steps (e.g. for all steps 3 border_line=7) are stored in an array of limited
length defined in 'max_len_border_line'
[max_n_path]
how many paths are possible between every pixel and the target
only needed if fast==False
"""
c = concentrate_every_n_pixel
assert c >= 1
if walls is None:
walls = np.zeros_like(target, dtype=bool) # depends on [control=['if'], data=['walls']]
s = target.shape
dt = np.uint16
if max(target.shape) < 200:
dt = np.uint8 # depends on [control=['if'], data=[]]
out = np.zeros((s[0] // c, s[1] // c), dtype=dt) # temporary arrays:
growth = np.zeros_like(target, dtype=dt)
res = np.empty(shape=3, dtype=dt)
steps = np.empty(shape=(max_len_border_line, 2), dtype=dt)
new_steps = np.empty(shape=(max_len_border_line, 2), dtype=dt) # run calculation:
_calc(growth, out, walls, target, steps, new_steps, res, concentrate_every_n_pixel)
if c > 1: # if concentrate_every_n_pixel > 1
# the resized output array
# will have wrong values close to the wall
# therefore substitute all wall value (-1)
# with an average of their closest neighbours
interpolate2dStructuredIDW(out, out == 0)
out = cv2.resize(out, s[::-1])
out[walls] = 0 # depends on [control=['if'], data=[]]
return out |
def open(self):
"""This is the only way to open a file resource."""
self.__sf = _sftp_open(self.__sftp_session_int,
self.__filepath,
self.access_type_int,
self.__create_mode)
if self.access_type_is_append is True:
self.seek(self.filesize)
return SftpFileObject(self) | def function[open, parameter[self]]:
constant[This is the only way to open a file resource.]
name[self].__sf assign[=] call[name[_sftp_open], parameter[name[self].__sftp_session_int, name[self].__filepath, name[self].access_type_int, name[self].__create_mode]]
if compare[name[self].access_type_is_append is constant[True]] begin[:]
call[name[self].seek, parameter[name[self].filesize]]
return[call[name[SftpFileObject], parameter[name[self]]]] | keyword[def] identifier[open] ( identifier[self] ):
literal[string]
identifier[self] . identifier[__sf] = identifier[_sftp_open] ( identifier[self] . identifier[__sftp_session_int] ,
identifier[self] . identifier[__filepath] ,
identifier[self] . identifier[access_type_int] ,
identifier[self] . identifier[__create_mode] )
keyword[if] identifier[self] . identifier[access_type_is_append] keyword[is] keyword[True] :
identifier[self] . identifier[seek] ( identifier[self] . identifier[filesize] )
keyword[return] identifier[SftpFileObject] ( identifier[self] ) | def open(self):
"""This is the only way to open a file resource."""
self.__sf = _sftp_open(self.__sftp_session_int, self.__filepath, self.access_type_int, self.__create_mode)
if self.access_type_is_append is True:
self.seek(self.filesize) # depends on [control=['if'], data=[]]
return SftpFileObject(self) |
def ensure_crops(self, *required_crops):
"""
Make sure a crop exists for each crop in required_crops.
Existing crops will not be changed.
If settings.ASSET_CELERY is specified then
the task will be run async
"""
if self._can_crop():
if settings.CELERY or settings.USE_CELERY_DECORATOR:
# this means that we are using celery
args = [self.pk]+list(required_crops)
tasks.ensure_crops.apply_async(args=args, countdown=5)
else:
tasks.ensure_crops(None, *required_crops, asset=self) | def function[ensure_crops, parameter[self]]:
constant[
Make sure a crop exists for each crop in required_crops.
Existing crops will not be changed.
If settings.ASSET_CELERY is specified then
the task will be run async
]
if call[name[self]._can_crop, parameter[]] begin[:]
if <ast.BoolOp object at 0x7da1b0bd0fd0> begin[:]
variable[args] assign[=] binary_operation[list[[<ast.Attribute object at 0x7da18bcca380>]] + call[name[list], parameter[name[required_crops]]]]
call[name[tasks].ensure_crops.apply_async, parameter[]] | keyword[def] identifier[ensure_crops] ( identifier[self] ,* identifier[required_crops] ):
literal[string]
keyword[if] identifier[self] . identifier[_can_crop] ():
keyword[if] identifier[settings] . identifier[CELERY] keyword[or] identifier[settings] . identifier[USE_CELERY_DECORATOR] :
identifier[args] =[ identifier[self] . identifier[pk] ]+ identifier[list] ( identifier[required_crops] )
identifier[tasks] . identifier[ensure_crops] . identifier[apply_async] ( identifier[args] = identifier[args] , identifier[countdown] = literal[int] )
keyword[else] :
identifier[tasks] . identifier[ensure_crops] ( keyword[None] ,* identifier[required_crops] , identifier[asset] = identifier[self] ) | def ensure_crops(self, *required_crops):
"""
Make sure a crop exists for each crop in required_crops.
Existing crops will not be changed.
If settings.ASSET_CELERY is specified then
the task will be run async
"""
if self._can_crop():
if settings.CELERY or settings.USE_CELERY_DECORATOR:
# this means that we are using celery
args = [self.pk] + list(required_crops)
tasks.ensure_crops.apply_async(args=args, countdown=5) # depends on [control=['if'], data=[]]
else:
tasks.ensure_crops(None, *required_crops, asset=self) # depends on [control=['if'], data=[]] |
def _write(self, session, openFile, replaceParamFile):
"""
Generic Time Series Write to File Method
"""
# Retrieve all time series
timeSeries = self.timeSeries
# Num TimeSeries
numTS = len(timeSeries)
# Transform into list of dictionaries for pivot tool
valList = []
for tsNum, ts in enumerate(timeSeries):
values = ts.values
for value in values:
valDict = {'time': value.simTime,
'tsNum': tsNum,
'value': value.value}
valList.append(valDict)
# Use pivot function (from lib) to pivot the values into
# a format that is easy to write.
result = pivot(valList, ('time',), ('tsNum',), 'value')
# Write lines
for line in result:
valString = ''
# Compile value string
for n in range(0, numTS):
val = '%.6f' % line[(n,)]
valString = '%s%s%s' % (
valString,
' ' * (13 - len(str(val))), # Fancy spacing trick
val)
openFile.write(' %.8f%s\n' % (line['time'], valString)) | def function[_write, parameter[self, session, openFile, replaceParamFile]]:
constant[
Generic Time Series Write to File Method
]
variable[timeSeries] assign[=] name[self].timeSeries
variable[numTS] assign[=] call[name[len], parameter[name[timeSeries]]]
variable[valList] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c6e6c50>, <ast.Name object at 0x7da20c6e6650>]]] in starred[call[name[enumerate], parameter[name[timeSeries]]]] begin[:]
variable[values] assign[=] name[ts].values
for taget[name[value]] in starred[name[values]] begin[:]
variable[valDict] assign[=] dictionary[[<ast.Constant object at 0x7da20c6e5240>, <ast.Constant object at 0x7da20c6e6aa0>, <ast.Constant object at 0x7da20c6e6890>], [<ast.Attribute object at 0x7da20c6e7d90>, <ast.Name object at 0x7da20c6e5300>, <ast.Attribute object at 0x7da20c6e67a0>]]
call[name[valList].append, parameter[name[valDict]]]
variable[result] assign[=] call[name[pivot], parameter[name[valList], tuple[[<ast.Constant object at 0x7da20c6e5360>]], tuple[[<ast.Constant object at 0x7da20c6e55d0>]], constant[value]]]
for taget[name[line]] in starred[name[result]] begin[:]
variable[valString] assign[=] constant[]
for taget[name[n]] in starred[call[name[range], parameter[constant[0], name[numTS]]]] begin[:]
variable[val] assign[=] binary_operation[constant[%.6f] <ast.Mod object at 0x7da2590d6920> call[name[line]][tuple[[<ast.Name object at 0x7da20c6e7dc0>]]]]
variable[valString] assign[=] binary_operation[constant[%s%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2049632e0>, <ast.BinOp object at 0x7da204961d20>, <ast.Name object at 0x7da204961f00>]]]
call[name[openFile].write, parameter[binary_operation[constant[ %.8f%s
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da204960430>, <ast.Name object at 0x7da204960820>]]]]] | keyword[def] identifier[_write] ( identifier[self] , identifier[session] , identifier[openFile] , identifier[replaceParamFile] ):
literal[string]
identifier[timeSeries] = identifier[self] . identifier[timeSeries]
identifier[numTS] = identifier[len] ( identifier[timeSeries] )
identifier[valList] =[]
keyword[for] identifier[tsNum] , identifier[ts] keyword[in] identifier[enumerate] ( identifier[timeSeries] ):
identifier[values] = identifier[ts] . identifier[values]
keyword[for] identifier[value] keyword[in] identifier[values] :
identifier[valDict] ={ literal[string] : identifier[value] . identifier[simTime] ,
literal[string] : identifier[tsNum] ,
literal[string] : identifier[value] . identifier[value] }
identifier[valList] . identifier[append] ( identifier[valDict] )
identifier[result] = identifier[pivot] ( identifier[valList] ,( literal[string] ,),( literal[string] ,), literal[string] )
keyword[for] identifier[line] keyword[in] identifier[result] :
identifier[valString] = literal[string]
keyword[for] identifier[n] keyword[in] identifier[range] ( literal[int] , identifier[numTS] ):
identifier[val] = literal[string] % identifier[line] [( identifier[n] ,)]
identifier[valString] = literal[string] %(
identifier[valString] ,
literal[string] *( literal[int] - identifier[len] ( identifier[str] ( identifier[val] ))),
identifier[val] )
identifier[openFile] . identifier[write] ( literal[string] %( identifier[line] [ literal[string] ], identifier[valString] )) | def _write(self, session, openFile, replaceParamFile):
"""
Generic Time Series Write to File Method
"""
# Retrieve all time series
timeSeries = self.timeSeries
# Num TimeSeries
numTS = len(timeSeries)
# Transform into list of dictionaries for pivot tool
valList = []
for (tsNum, ts) in enumerate(timeSeries):
values = ts.values
for value in values:
valDict = {'time': value.simTime, 'tsNum': tsNum, 'value': value.value}
valList.append(valDict) # depends on [control=['for'], data=['value']] # depends on [control=['for'], data=[]]
# Use pivot function (from lib) to pivot the values into
# a format that is easy to write.
result = pivot(valList, ('time',), ('tsNum',), 'value')
# Write lines
for line in result:
valString = ''
# Compile value string
for n in range(0, numTS):
val = '%.6f' % line[n,] # Fancy spacing trick
valString = '%s%s%s' % (valString, ' ' * (13 - len(str(val))), val) # depends on [control=['for'], data=['n']]
openFile.write(' %.8f%s\n' % (line['time'], valString)) # depends on [control=['for'], data=['line']] |
def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
ApiCli.get_arguments(self)
if self.args.hostGroupName is not None:
self.url_parameters = {"name": self.args.hostGroupName} | def function[get_arguments, parameter[self]]:
constant[
Extracts the specific arguments of this CLI
]
call[name[ApiCli].get_arguments, parameter[name[self]]]
if compare[name[self].args.hostGroupName is_not constant[None]] begin[:]
name[self].url_parameters assign[=] dictionary[[<ast.Constant object at 0x7da1b02102e0>], [<ast.Attribute object at 0x7da1b0212cb0>]] | keyword[def] identifier[get_arguments] ( identifier[self] ):
literal[string]
identifier[ApiCli] . identifier[get_arguments] ( identifier[self] )
keyword[if] identifier[self] . identifier[args] . identifier[hostGroupName] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[url_parameters] ={ literal[string] : identifier[self] . identifier[args] . identifier[hostGroupName] } | def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
ApiCli.get_arguments(self)
if self.args.hostGroupName is not None:
self.url_parameters = {'name': self.args.hostGroupName} # depends on [control=['if'], data=[]] |
def close(self):
"""Send CLOSE command to device."""
close_command = StandardSend(self._address,
COMMAND_LIGHT_OFF_0X13_0X00)
self._send_method(close_command, self._close_message_received) | def function[close, parameter[self]]:
constant[Send CLOSE command to device.]
variable[close_command] assign[=] call[name[StandardSend], parameter[name[self]._address, name[COMMAND_LIGHT_OFF_0X13_0X00]]]
call[name[self]._send_method, parameter[name[close_command], name[self]._close_message_received]] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
identifier[close_command] = identifier[StandardSend] ( identifier[self] . identifier[_address] ,
identifier[COMMAND_LIGHT_OFF_0X13_0X00] )
identifier[self] . identifier[_send_method] ( identifier[close_command] , identifier[self] . identifier[_close_message_received] ) | def close(self):
"""Send CLOSE command to device."""
close_command = StandardSend(self._address, COMMAND_LIGHT_OFF_0X13_0X00)
self._send_method(close_command, self._close_message_received) |
def _parse_peer_address(self, config):
"""Scans the config block and parses the peer-address value
Args:
config (str): The config block to scan
Returns:
dict: A dict object that is intended to be merged into the
resource dict
"""
match = re.search(r'peer-address ([^\s]+)', config)
value = match.group(1) if match else None
return dict(peer_address=value) | def function[_parse_peer_address, parameter[self, config]]:
constant[Scans the config block and parses the peer-address value
Args:
config (str): The config block to scan
Returns:
dict: A dict object that is intended to be merged into the
resource dict
]
variable[match] assign[=] call[name[re].search, parameter[constant[peer-address ([^\s]+)], name[config]]]
variable[value] assign[=] <ast.IfExp object at 0x7da18dc984f0>
return[call[name[dict], parameter[]]] | keyword[def] identifier[_parse_peer_address] ( identifier[self] , identifier[config] ):
literal[string]
identifier[match] = identifier[re] . identifier[search] ( literal[string] , identifier[config] )
identifier[value] = identifier[match] . identifier[group] ( literal[int] ) keyword[if] identifier[match] keyword[else] keyword[None]
keyword[return] identifier[dict] ( identifier[peer_address] = identifier[value] ) | def _parse_peer_address(self, config):
"""Scans the config block and parses the peer-address value
Args:
config (str): The config block to scan
Returns:
dict: A dict object that is intended to be merged into the
resource dict
"""
match = re.search('peer-address ([^\\s]+)', config)
value = match.group(1) if match else None
return dict(peer_address=value) |
def _fully_random_weights(n_features, lam_scale, prng):
"""Generate a symmetric random matrix with zeros along the diagonal."""
weights = np.zeros((n_features, n_features))
n_off_diag = int((n_features ** 2 - n_features) / 2)
weights[np.triu_indices(n_features, k=1)] = 0.1 * lam_scale * prng.randn(
n_off_diag
) + (0.25 * lam_scale)
weights[weights < 0] = 0
weights = weights + weights.T
return weights | def function[_fully_random_weights, parameter[n_features, lam_scale, prng]]:
constant[Generate a symmetric random matrix with zeros along the diagonal.]
variable[weights] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da20c794c40>, <ast.Name object at 0x7da20c795630>]]]]
variable[n_off_diag] assign[=] call[name[int], parameter[binary_operation[binary_operation[binary_operation[name[n_features] ** constant[2]] - name[n_features]] / constant[2]]]]
call[name[weights]][call[name[np].triu_indices, parameter[name[n_features]]]] assign[=] binary_operation[binary_operation[binary_operation[constant[0.1] * name[lam_scale]] * call[name[prng].randn, parameter[name[n_off_diag]]]] + binary_operation[constant[0.25] * name[lam_scale]]]
call[name[weights]][compare[name[weights] less[<] constant[0]]] assign[=] constant[0]
variable[weights] assign[=] binary_operation[name[weights] + name[weights].T]
return[name[weights]] | keyword[def] identifier[_fully_random_weights] ( identifier[n_features] , identifier[lam_scale] , identifier[prng] ):
literal[string]
identifier[weights] = identifier[np] . identifier[zeros] (( identifier[n_features] , identifier[n_features] ))
identifier[n_off_diag] = identifier[int] (( identifier[n_features] ** literal[int] - identifier[n_features] )/ literal[int] )
identifier[weights] [ identifier[np] . identifier[triu_indices] ( identifier[n_features] , identifier[k] = literal[int] )]= literal[int] * identifier[lam_scale] * identifier[prng] . identifier[randn] (
identifier[n_off_diag]
)+( literal[int] * identifier[lam_scale] )
identifier[weights] [ identifier[weights] < literal[int] ]= literal[int]
identifier[weights] = identifier[weights] + identifier[weights] . identifier[T]
keyword[return] identifier[weights] | def _fully_random_weights(n_features, lam_scale, prng):
"""Generate a symmetric random matrix with zeros along the diagonal."""
weights = np.zeros((n_features, n_features))
n_off_diag = int((n_features ** 2 - n_features) / 2)
weights[np.triu_indices(n_features, k=1)] = 0.1 * lam_scale * prng.randn(n_off_diag) + 0.25 * lam_scale
weights[weights < 0] = 0
weights = weights + weights.T
return weights |
def check_executables():
"""Check if all necessary / recommended executables are installed."""
print("\033[1mCheck executables\033[0m")
required_executables = [utils.get_nntoolkit()]
for executable in required_executables:
path = which(executable)
if path is None:
print("%s ... %sNOT%s found" % (executable, Bcolors.WARNING,
Bcolors.ENDC))
else:
print("%s ... %sfound%s at %s" % (executable, Bcolors.OKGREEN,
Bcolors.ENDC, path)) | def function[check_executables, parameter[]]:
constant[Check if all necessary / recommended executables are installed.]
call[name[print], parameter[constant[[1mCheck executables[0m]]]
variable[required_executables] assign[=] list[[<ast.Call object at 0x7da1b28b8160>]]
for taget[name[executable]] in starred[name[required_executables]] begin[:]
variable[path] assign[=] call[name[which], parameter[name[executable]]]
if compare[name[path] is constant[None]] begin[:]
call[name[print], parameter[binary_operation[constant[%s ... %sNOT%s found] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b28b8a60>, <ast.Attribute object at 0x7da1b28b9c30>, <ast.Attribute object at 0x7da1b28b8ca0>]]]]] | keyword[def] identifier[check_executables] ():
literal[string]
identifier[print] ( literal[string] )
identifier[required_executables] =[ identifier[utils] . identifier[get_nntoolkit] ()]
keyword[for] identifier[executable] keyword[in] identifier[required_executables] :
identifier[path] = identifier[which] ( identifier[executable] )
keyword[if] identifier[path] keyword[is] keyword[None] :
identifier[print] ( literal[string] %( identifier[executable] , identifier[Bcolors] . identifier[WARNING] ,
identifier[Bcolors] . identifier[ENDC] ))
keyword[else] :
identifier[print] ( literal[string] %( identifier[executable] , identifier[Bcolors] . identifier[OKGREEN] ,
identifier[Bcolors] . identifier[ENDC] , identifier[path] )) | def check_executables():
"""Check if all necessary / recommended executables are installed."""
print('\x1b[1mCheck executables\x1b[0m')
required_executables = [utils.get_nntoolkit()]
for executable in required_executables:
path = which(executable)
if path is None:
print('%s ... %sNOT%s found' % (executable, Bcolors.WARNING, Bcolors.ENDC)) # depends on [control=['if'], data=[]]
else:
print('%s ... %sfound%s at %s' % (executable, Bcolors.OKGREEN, Bcolors.ENDC, path)) # depends on [control=['for'], data=['executable']] |
def delete(self, pid, record, key):
"""Handle DELETE deposit file.
Permission required: `update_permission_factory`.
:param pid: Pid object (from url).
:param record: Record object resolved from the pid.
:param key: Unique identifier for the file in the deposit.
"""
try:
del record.files[str(key)]
record.commit()
db.session.commit()
return make_response('', 204)
except KeyError:
abort(404, 'The specified object does not exist or has already '
'been deleted.') | def function[delete, parameter[self, pid, record, key]]:
constant[Handle DELETE deposit file.
Permission required: `update_permission_factory`.
:param pid: Pid object (from url).
:param record: Record object resolved from the pid.
:param key: Unique identifier for the file in the deposit.
]
<ast.Try object at 0x7da1afe6f2e0> | keyword[def] identifier[delete] ( identifier[self] , identifier[pid] , identifier[record] , identifier[key] ):
literal[string]
keyword[try] :
keyword[del] identifier[record] . identifier[files] [ identifier[str] ( identifier[key] )]
identifier[record] . identifier[commit] ()
identifier[db] . identifier[session] . identifier[commit] ()
keyword[return] identifier[make_response] ( literal[string] , literal[int] )
keyword[except] identifier[KeyError] :
identifier[abort] ( literal[int] , literal[string]
literal[string] ) | def delete(self, pid, record, key):
"""Handle DELETE deposit file.
Permission required: `update_permission_factory`.
:param pid: Pid object (from url).
:param record: Record object resolved from the pid.
:param key: Unique identifier for the file in the deposit.
"""
try:
del record.files[str(key)]
record.commit()
db.session.commit()
return make_response('', 204) # depends on [control=['try'], data=[]]
except KeyError:
abort(404, 'The specified object does not exist or has already been deleted.') # depends on [control=['except'], data=[]] |
def register_eph_task(self, *args, **kwargs):
"""Register an electron-phonon task."""
kwargs["task_class"] = EphTask
return self.register_task(*args, **kwargs) | def function[register_eph_task, parameter[self]]:
constant[Register an electron-phonon task.]
call[name[kwargs]][constant[task_class]] assign[=] name[EphTask]
return[call[name[self].register_task, parameter[<ast.Starred object at 0x7da1b21861a0>]]] | keyword[def] identifier[register_eph_task] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= identifier[EphTask]
keyword[return] identifier[self] . identifier[register_task] (* identifier[args] ,** identifier[kwargs] ) | def register_eph_task(self, *args, **kwargs):
"""Register an electron-phonon task."""
kwargs['task_class'] = EphTask
return self.register_task(*args, **kwargs) |
def set_limits(self, limits):
"""
Set the limit data to the given list of limits. Limits are
specified as the raw msgpack string representing the limit.
Computes the checksum of the limits; if the checksum is
identical to the current one, no action is taken.
"""
# First task, build the checksum of the new limits
chksum = hashlib.md5() # sufficient for our purposes
for lim in limits:
chksum.update(lim)
new_sum = chksum.hexdigest()
# Now install it
with self.limit_lock:
if self.limit_sum == new_sum:
# No changes
return
self.limit_data = [msgpack.loads(lim) for lim in limits]
self.limit_sum = new_sum | def function[set_limits, parameter[self, limits]]:
constant[
Set the limit data to the given list of limits. Limits are
specified as the raw msgpack string representing the limit.
Computes the checksum of the limits; if the checksum is
identical to the current one, no action is taken.
]
variable[chksum] assign[=] call[name[hashlib].md5, parameter[]]
for taget[name[lim]] in starred[name[limits]] begin[:]
call[name[chksum].update, parameter[name[lim]]]
variable[new_sum] assign[=] call[name[chksum].hexdigest, parameter[]]
with name[self].limit_lock begin[:]
if compare[name[self].limit_sum equal[==] name[new_sum]] begin[:]
return[None]
name[self].limit_data assign[=] <ast.ListComp object at 0x7da204564af0>
name[self].limit_sum assign[=] name[new_sum] | keyword[def] identifier[set_limits] ( identifier[self] , identifier[limits] ):
literal[string]
identifier[chksum] = identifier[hashlib] . identifier[md5] ()
keyword[for] identifier[lim] keyword[in] identifier[limits] :
identifier[chksum] . identifier[update] ( identifier[lim] )
identifier[new_sum] = identifier[chksum] . identifier[hexdigest] ()
keyword[with] identifier[self] . identifier[limit_lock] :
keyword[if] identifier[self] . identifier[limit_sum] == identifier[new_sum] :
keyword[return]
identifier[self] . identifier[limit_data] =[ identifier[msgpack] . identifier[loads] ( identifier[lim] ) keyword[for] identifier[lim] keyword[in] identifier[limits] ]
identifier[self] . identifier[limit_sum] = identifier[new_sum] | def set_limits(self, limits):
"""
Set the limit data to the given list of limits. Limits are
specified as the raw msgpack string representing the limit.
Computes the checksum of the limits; if the checksum is
identical to the current one, no action is taken.
"""
# First task, build the checksum of the new limits
chksum = hashlib.md5() # sufficient for our purposes
for lim in limits:
chksum.update(lim) # depends on [control=['for'], data=['lim']]
new_sum = chksum.hexdigest()
# Now install it
with self.limit_lock:
if self.limit_sum == new_sum:
# No changes
return # depends on [control=['if'], data=[]]
self.limit_data = [msgpack.loads(lim) for lim in limits]
self.limit_sum = new_sum # depends on [control=['with'], data=[]] |
def update_pipeline(self, pipeline):
'''Updates a pipeline with the provided attributes.
Args:
key required identifier for the pipeline
pipeline StreakPipeline object
return (status code, pipeline_dict)
'''
#req sanity check
payload = None
if type(pipeline) is not StreakPipeline:
return requests.codes.bad_request, None
payload = pipeline.to_dict(rw = True)
try:
uri = '/'.join([
self.api_uri,
self.pipelines_suffix,
pipeline.attributes['pipelineKey']
])
except KeyError:
return requests.codes.bad_request, None
code, r_data = self._req('post', uri , json.dumps(payload))
return code, r_data | def function[update_pipeline, parameter[self, pipeline]]:
constant[Updates a pipeline with the provided attributes.
Args:
key required identifier for the pipeline
pipeline StreakPipeline object
return (status code, pipeline_dict)
]
variable[payload] assign[=] constant[None]
if compare[call[name[type], parameter[name[pipeline]]] is_not name[StreakPipeline]] begin[:]
return[tuple[[<ast.Attribute object at 0x7da1b26acf10>, <ast.Constant object at 0x7da1b26ac6a0>]]]
variable[payload] assign[=] call[name[pipeline].to_dict, parameter[]]
<ast.Try object at 0x7da1b15c11b0>
<ast.Tuple object at 0x7da1b15c00d0> assign[=] call[name[self]._req, parameter[constant[post], name[uri], call[name[json].dumps, parameter[name[payload]]]]]
return[tuple[[<ast.Name object at 0x7da1b15c1cf0>, <ast.Name object at 0x7da1b15c1bd0>]]] | keyword[def] identifier[update_pipeline] ( identifier[self] , identifier[pipeline] ):
literal[string]
identifier[payload] = keyword[None]
keyword[if] identifier[type] ( identifier[pipeline] ) keyword[is] keyword[not] identifier[StreakPipeline] :
keyword[return] identifier[requests] . identifier[codes] . identifier[bad_request] , keyword[None]
identifier[payload] = identifier[pipeline] . identifier[to_dict] ( identifier[rw] = keyword[True] )
keyword[try] :
identifier[uri] = literal[string] . identifier[join] ([
identifier[self] . identifier[api_uri] ,
identifier[self] . identifier[pipelines_suffix] ,
identifier[pipeline] . identifier[attributes] [ literal[string] ]
])
keyword[except] identifier[KeyError] :
keyword[return] identifier[requests] . identifier[codes] . identifier[bad_request] , keyword[None]
identifier[code] , identifier[r_data] = identifier[self] . identifier[_req] ( literal[string] , identifier[uri] , identifier[json] . identifier[dumps] ( identifier[payload] ))
keyword[return] identifier[code] , identifier[r_data] | def update_pipeline(self, pipeline):
"""Updates a pipeline with the provided attributes.
Args:
key required identifier for the pipeline
pipeline StreakPipeline object
return (status code, pipeline_dict)
""" #req sanity check
payload = None
if type(pipeline) is not StreakPipeline:
return (requests.codes.bad_request, None) # depends on [control=['if'], data=[]]
payload = pipeline.to_dict(rw=True)
try:
uri = '/'.join([self.api_uri, self.pipelines_suffix, pipeline.attributes['pipelineKey']]) # depends on [control=['try'], data=[]]
except KeyError:
return (requests.codes.bad_request, None) # depends on [control=['except'], data=[]]
(code, r_data) = self._req('post', uri, json.dumps(payload))
return (code, r_data) |
def nvmlDeviceGetVbiosVersion(handle):
r"""
/**
* Get VBIOS version of the device.
*
* For all products.
*
* The VBIOS version may change from time to time. It will not exceed 32 characters in length
* (including the NULL terminator). See \ref nvmlConstants::NVML_DEVICE_VBIOS_VERSION_BUFFER_SIZE.
*
* @param device The identifier of the target device
* @param version Reference to which to return the VBIOS version
* @param length The maximum allowed length of the string returned in \a version
*
* @return
* - \ref NVML_SUCCESS if \a version has been set
* - \ref NVML_ERROR_UNINITIALIZED if the library has not been successfully initialized
* - \ref NVML_ERROR_INVALID_ARGUMENT if \a device is invalid, or \a version is NULL
* - \ref NVML_ERROR_INSUFFICIENT_SIZE if \a length is too small
* - \ref NVML_ERROR_GPU_IS_LOST if the target GPU has fallen off the bus or is otherwise inaccessible
* - \ref NVML_ERROR_UNKNOWN on any unexpected error
*/
nvmlReturn_t DECLDIR nvmlDeviceGetVbiosVersion
"""
c_version = create_string_buffer(NVML_DEVICE_VBIOS_VERSION_BUFFER_SIZE)
fn = _nvmlGetFunctionPointer("nvmlDeviceGetVbiosVersion")
ret = fn(handle, c_version, c_uint(NVML_DEVICE_VBIOS_VERSION_BUFFER_SIZE))
_nvmlCheckReturn(ret)
return bytes_to_str(c_version.value) | def function[nvmlDeviceGetVbiosVersion, parameter[handle]]:
constant[
/**
* Get VBIOS version of the device.
*
* For all products.
*
* The VBIOS version may change from time to time. It will not exceed 32 characters in length
* (including the NULL terminator). See \ref nvmlConstants::NVML_DEVICE_VBIOS_VERSION_BUFFER_SIZE.
*
* @param device The identifier of the target device
* @param version Reference to which to return the VBIOS version
* @param length The maximum allowed length of the string returned in \a version
*
* @return
* - \ref NVML_SUCCESS if \a version has been set
* - \ref NVML_ERROR_UNINITIALIZED if the library has not been successfully initialized
* - \ref NVML_ERROR_INVALID_ARGUMENT if \a device is invalid, or \a version is NULL
* - \ref NVML_ERROR_INSUFFICIENT_SIZE if \a length is too small
* - \ref NVML_ERROR_GPU_IS_LOST if the target GPU has fallen off the bus or is otherwise inaccessible
* - \ref NVML_ERROR_UNKNOWN on any unexpected error
*/
nvmlReturn_t DECLDIR nvmlDeviceGetVbiosVersion
]
variable[c_version] assign[=] call[name[create_string_buffer], parameter[name[NVML_DEVICE_VBIOS_VERSION_BUFFER_SIZE]]]
variable[fn] assign[=] call[name[_nvmlGetFunctionPointer], parameter[constant[nvmlDeviceGetVbiosVersion]]]
variable[ret] assign[=] call[name[fn], parameter[name[handle], name[c_version], call[name[c_uint], parameter[name[NVML_DEVICE_VBIOS_VERSION_BUFFER_SIZE]]]]]
call[name[_nvmlCheckReturn], parameter[name[ret]]]
return[call[name[bytes_to_str], parameter[name[c_version].value]]] | keyword[def] identifier[nvmlDeviceGetVbiosVersion] ( identifier[handle] ):
literal[string]
identifier[c_version] = identifier[create_string_buffer] ( identifier[NVML_DEVICE_VBIOS_VERSION_BUFFER_SIZE] )
identifier[fn] = identifier[_nvmlGetFunctionPointer] ( literal[string] )
identifier[ret] = identifier[fn] ( identifier[handle] , identifier[c_version] , identifier[c_uint] ( identifier[NVML_DEVICE_VBIOS_VERSION_BUFFER_SIZE] ))
identifier[_nvmlCheckReturn] ( identifier[ret] )
keyword[return] identifier[bytes_to_str] ( identifier[c_version] . identifier[value] ) | def nvmlDeviceGetVbiosVersion(handle):
"""
/**
* Get VBIOS version of the device.
*
* For all products.
*
* The VBIOS version may change from time to time. It will not exceed 32 characters in length
* (including the NULL terminator). See \\ref nvmlConstants::NVML_DEVICE_VBIOS_VERSION_BUFFER_SIZE.
*
* @param device The identifier of the target device
* @param version Reference to which to return the VBIOS version
* @param length The maximum allowed length of the string returned in \\a version
*
* @return
* - \\ref NVML_SUCCESS if \\a version has been set
* - \\ref NVML_ERROR_UNINITIALIZED if the library has not been successfully initialized
* - \\ref NVML_ERROR_INVALID_ARGUMENT if \\a device is invalid, or \\a version is NULL
* - \\ref NVML_ERROR_INSUFFICIENT_SIZE if \\a length is too small
* - \\ref NVML_ERROR_GPU_IS_LOST if the target GPU has fallen off the bus or is otherwise inaccessible
* - \\ref NVML_ERROR_UNKNOWN on any unexpected error
*/
nvmlReturn_t DECLDIR nvmlDeviceGetVbiosVersion
"""
c_version = create_string_buffer(NVML_DEVICE_VBIOS_VERSION_BUFFER_SIZE)
fn = _nvmlGetFunctionPointer('nvmlDeviceGetVbiosVersion')
ret = fn(handle, c_version, c_uint(NVML_DEVICE_VBIOS_VERSION_BUFFER_SIZE))
_nvmlCheckReturn(ret)
return bytes_to_str(c_version.value) |
def last_kstp_from_kper(hds,kper):
""" function to find the last time step (kstp) for a
give stress period (kper) in a modflow head save file.
Parameters
----------
hds : flopy.utils.HeadFile
kper : int
the zero-index stress period number
Returns
-------
kstp : int
the zero-based last time step during stress period
kper in the head save file
"""
#find the last kstp with this kper
kstp = -1
for kkstp,kkper in hds.kstpkper:
if kkper == kper+1 and kkstp > kstp:
kstp = kkstp
if kstp == -1:
raise Exception("kstp not found for kper {0}".format(kper))
kstp -= 1
return kstp | def function[last_kstp_from_kper, parameter[hds, kper]]:
constant[ function to find the last time step (kstp) for a
give stress period (kper) in a modflow head save file.
Parameters
----------
hds : flopy.utils.HeadFile
kper : int
the zero-index stress period number
Returns
-------
kstp : int
the zero-based last time step during stress period
kper in the head save file
]
variable[kstp] assign[=] <ast.UnaryOp object at 0x7da1b23c6a10>
for taget[tuple[[<ast.Name object at 0x7da1b23c6ad0>, <ast.Name object at 0x7da1b23c6b00>]]] in starred[name[hds].kstpkper] begin[:]
if <ast.BoolOp object at 0x7da1b23c6bc0> begin[:]
variable[kstp] assign[=] name[kkstp]
if compare[name[kstp] equal[==] <ast.UnaryOp object at 0x7da1b23c6e90>] begin[:]
<ast.Raise object at 0x7da1b23c6ef0>
<ast.AugAssign object at 0x7da1b23c78e0>
return[name[kstp]] | keyword[def] identifier[last_kstp_from_kper] ( identifier[hds] , identifier[kper] ):
literal[string]
identifier[kstp] =- literal[int]
keyword[for] identifier[kkstp] , identifier[kkper] keyword[in] identifier[hds] . identifier[kstpkper] :
keyword[if] identifier[kkper] == identifier[kper] + literal[int] keyword[and] identifier[kkstp] > identifier[kstp] :
identifier[kstp] = identifier[kkstp]
keyword[if] identifier[kstp] ==- literal[int] :
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[kper] ))
identifier[kstp] -= literal[int]
keyword[return] identifier[kstp] | def last_kstp_from_kper(hds, kper):
""" function to find the last time step (kstp) for a
give stress period (kper) in a modflow head save file.
Parameters
----------
hds : flopy.utils.HeadFile
kper : int
the zero-index stress period number
Returns
-------
kstp : int
the zero-based last time step during stress period
kper in the head save file
"""
#find the last kstp with this kper
kstp = -1
for (kkstp, kkper) in hds.kstpkper:
if kkper == kper + 1 and kkstp > kstp:
kstp = kkstp # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if kstp == -1:
raise Exception('kstp not found for kper {0}'.format(kper)) # depends on [control=['if'], data=[]]
kstp -= 1
return kstp |
def _DownloadTS05Data(Overwrite=False):
'''
This function will try to download all existing TS05 archives and
extract them in $GEOPACK_PATH/tab.
'''
Year = 1995
Cont = True
OutPath = Globals.DataPath+'tab/'
cmd0 = 'wget -nv --show-progress '
cmd0 += 'http://geo.phys.spbu.ru/~tsyganenko/TS05_data_and_stuff/{:4d}_OMNI_5m_with_TS05_variables.zip'
cmd0 += ' -O ' + OutPath + '{:04d}.zip'
cmd1 = 'unzip ' + OutPath + '{:04d}.zip -d ' + OutPath
cmd2 = 'rm -v '+ OutPath + '{:04d}.zip'
cmd3 = 'mv -v ' + OutPath + '{:04d}_OMNI_5m_with_TS05_variables.dat '
cmd3 += OutPath + '{:04d}.tab'
files = []
while Cont:
if Overwrite or (not os.path.isfile(Globals.DataPath+'tab/{:04d}.tab'.format(Year))):
ret = os.system(cmd0.format(Year,Year))
if ret == 0:
#extract file
os.system(cmd1.format(Year))
#delete archive
os.system(cmd2.format(Year))
#rename tab
os.system(cmd3.format(Year,Year))
files.append(OutPath+'{:04}.tab'.format(Year))
else:
#stop loop
os.system(cmd2.format(Year))
Cont = False
Year += 1 | def function[_DownloadTS05Data, parameter[Overwrite]]:
constant[
This function will try to download all existing TS05 archives and
extract them in $GEOPACK_PATH/tab.
]
variable[Year] assign[=] constant[1995]
variable[Cont] assign[=] constant[True]
variable[OutPath] assign[=] binary_operation[name[Globals].DataPath + constant[tab/]]
variable[cmd0] assign[=] constant[wget -nv --show-progress ]
<ast.AugAssign object at 0x7da18bcc84f0>
<ast.AugAssign object at 0x7da18bcc98d0>
variable[cmd1] assign[=] binary_operation[binary_operation[binary_operation[constant[unzip ] + name[OutPath]] + constant[{:04d}.zip -d ]] + name[OutPath]]
variable[cmd2] assign[=] binary_operation[binary_operation[constant[rm -v ] + name[OutPath]] + constant[{:04d}.zip]]
variable[cmd3] assign[=] binary_operation[binary_operation[constant[mv -v ] + name[OutPath]] + constant[{:04d}_OMNI_5m_with_TS05_variables.dat ]]
<ast.AugAssign object at 0x7da207f9ae30>
variable[files] assign[=] list[[]]
while name[Cont] begin[:]
if <ast.BoolOp object at 0x7da18fe91660> begin[:]
variable[ret] assign[=] call[name[os].system, parameter[call[name[cmd0].format, parameter[name[Year], name[Year]]]]]
if compare[name[ret] equal[==] constant[0]] begin[:]
call[name[os].system, parameter[call[name[cmd1].format, parameter[name[Year]]]]]
call[name[os].system, parameter[call[name[cmd2].format, parameter[name[Year]]]]]
call[name[os].system, parameter[call[name[cmd3].format, parameter[name[Year], name[Year]]]]]
call[name[files].append, parameter[binary_operation[name[OutPath] + call[constant[{:04}.tab].format, parameter[name[Year]]]]]]
<ast.AugAssign object at 0x7da1b0a81c00> | keyword[def] identifier[_DownloadTS05Data] ( identifier[Overwrite] = keyword[False] ):
literal[string]
identifier[Year] = literal[int]
identifier[Cont] = keyword[True]
identifier[OutPath] = identifier[Globals] . identifier[DataPath] + literal[string]
identifier[cmd0] = literal[string]
identifier[cmd0] += literal[string]
identifier[cmd0] += literal[string] + identifier[OutPath] + literal[string]
identifier[cmd1] = literal[string] + identifier[OutPath] + literal[string] + identifier[OutPath]
identifier[cmd2] = literal[string] + identifier[OutPath] + literal[string]
identifier[cmd3] = literal[string] + identifier[OutPath] + literal[string]
identifier[cmd3] += identifier[OutPath] + literal[string]
identifier[files] =[]
keyword[while] identifier[Cont] :
keyword[if] identifier[Overwrite] keyword[or] ( keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[Globals] . identifier[DataPath] + literal[string] . identifier[format] ( identifier[Year] ))):
identifier[ret] = identifier[os] . identifier[system] ( identifier[cmd0] . identifier[format] ( identifier[Year] , identifier[Year] ))
keyword[if] identifier[ret] == literal[int] :
identifier[os] . identifier[system] ( identifier[cmd1] . identifier[format] ( identifier[Year] ))
identifier[os] . identifier[system] ( identifier[cmd2] . identifier[format] ( identifier[Year] ))
identifier[os] . identifier[system] ( identifier[cmd3] . identifier[format] ( identifier[Year] , identifier[Year] ))
identifier[files] . identifier[append] ( identifier[OutPath] + literal[string] . identifier[format] ( identifier[Year] ))
keyword[else] :
identifier[os] . identifier[system] ( identifier[cmd2] . identifier[format] ( identifier[Year] ))
identifier[Cont] = keyword[False]
identifier[Year] += literal[int] | def _DownloadTS05Data(Overwrite=False):
"""
This function will try to download all existing TS05 archives and
extract them in $GEOPACK_PATH/tab.
"""
Year = 1995
Cont = True
OutPath = Globals.DataPath + 'tab/'
cmd0 = 'wget -nv --show-progress '
cmd0 += 'http://geo.phys.spbu.ru/~tsyganenko/TS05_data_and_stuff/{:4d}_OMNI_5m_with_TS05_variables.zip'
cmd0 += ' -O ' + OutPath + '{:04d}.zip'
cmd1 = 'unzip ' + OutPath + '{:04d}.zip -d ' + OutPath
cmd2 = 'rm -v ' + OutPath + '{:04d}.zip'
cmd3 = 'mv -v ' + OutPath + '{:04d}_OMNI_5m_with_TS05_variables.dat '
cmd3 += OutPath + '{:04d}.tab'
files = []
while Cont:
if Overwrite or not os.path.isfile(Globals.DataPath + 'tab/{:04d}.tab'.format(Year)):
ret = os.system(cmd0.format(Year, Year))
if ret == 0: #extract file
os.system(cmd1.format(Year)) #delete archive
os.system(cmd2.format(Year)) #rename tab
os.system(cmd3.format(Year, Year))
files.append(OutPath + '{:04}.tab'.format(Year)) # depends on [control=['if'], data=[]]
else: #stop loop
os.system(cmd2.format(Year))
Cont = False # depends on [control=['if'], data=[]]
Year += 1 # depends on [control=['while'], data=[]] |
def GenerateConfigFile(load_hook, dump_hook, **kwargs) -> ConfigFile:
"""
Generates a ConfigFile object using the specified hooks.
These hooks should be functions, and have one argument.
When a hook is called, the ConfigFile object is passed to it. Use this to load your data from the fd object, or request, or whatever.
This returns a ConfigFile object.
"""
def ConfigFileGenerator(filename, safe_load: bool=True):
cfg = ConfigFile(fd=filename, load_hook=load_hook, dump_hook=dump_hook, safe_load=safe_load, **kwargs)
return cfg
return ConfigFileGenerator | def function[GenerateConfigFile, parameter[load_hook, dump_hook]]:
constant[
Generates a ConfigFile object using the specified hooks.
These hooks should be functions, and have one argument.
When a hook is called, the ConfigFile object is passed to it. Use this to load your data from the fd object, or request, or whatever.
This returns a ConfigFile object.
]
def function[ConfigFileGenerator, parameter[filename, safe_load]]:
variable[cfg] assign[=] call[name[ConfigFile], parameter[]]
return[name[cfg]]
return[name[ConfigFileGenerator]] | keyword[def] identifier[GenerateConfigFile] ( identifier[load_hook] , identifier[dump_hook] ,** identifier[kwargs] )-> identifier[ConfigFile] :
literal[string]
keyword[def] identifier[ConfigFileGenerator] ( identifier[filename] , identifier[safe_load] : identifier[bool] = keyword[True] ):
identifier[cfg] = identifier[ConfigFile] ( identifier[fd] = identifier[filename] , identifier[load_hook] = identifier[load_hook] , identifier[dump_hook] = identifier[dump_hook] , identifier[safe_load] = identifier[safe_load] ,** identifier[kwargs] )
keyword[return] identifier[cfg]
keyword[return] identifier[ConfigFileGenerator] | def GenerateConfigFile(load_hook, dump_hook, **kwargs) -> ConfigFile:
"""
Generates a ConfigFile object using the specified hooks.
These hooks should be functions, and have one argument.
When a hook is called, the ConfigFile object is passed to it. Use this to load your data from the fd object, or request, or whatever.
This returns a ConfigFile object.
"""
def ConfigFileGenerator(filename, safe_load: bool=True):
cfg = ConfigFile(fd=filename, load_hook=load_hook, dump_hook=dump_hook, safe_load=safe_load, **kwargs)
return cfg
return ConfigFileGenerator |
def computers(self, base_dn, samaccountnames=(), attributes=()):
"""Gathers a list of ADComputer objects
:param str base_dn: The base DN to search within
:param list samaccountnames: A list of computer names for which objects will be
created, defaults to all computers if unspecified
:param list attributes: Object attributes to populate, defaults to all
:return: A list of populated ADComputer objects
:rtype: list
"""
ad_computers = []
search_filter = '(&(objectClass=computer){0})'
# If no samaccountnames specified, filter will pull all computer objects under
# base_dn
if not samaccountnames:
search_filter = search_filter.format('(sAMAccountName=*)')
else:
if len(samaccountnames) == 1:
computer_names = '(sAMAccountName={0})'.format(samaccountnames[0])
else:
computer_names = '(|{0})'.format(''.join(['(sAMAccountName={0})'.\
format(computer) for computer
in samaccountnames]))
search_filter = search_filter.format(computer_names)
logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)
results = self.adq.search(base_dn, search_filter, attributes)
for search_result in results:
adc = self._object_factory(search_result)
ad_computers.append(adc)
return ad_computers | def function[computers, parameter[self, base_dn, samaccountnames, attributes]]:
constant[Gathers a list of ADComputer objects
:param str base_dn: The base DN to search within
:param list samaccountnames: A list of computer names for which objects will be
created, defaults to all computers if unspecified
:param list attributes: Object attributes to populate, defaults to all
:return: A list of populated ADComputer objects
:rtype: list
]
variable[ad_computers] assign[=] list[[]]
variable[search_filter] assign[=] constant[(&(objectClass=computer){0})]
if <ast.UnaryOp object at 0x7da1b0a22a70> begin[:]
variable[search_filter] assign[=] call[name[search_filter].format, parameter[constant[(sAMAccountName=*)]]]
call[name[logging].debug, parameter[constant[%s Search filter: %s], name[self].__class__.__name__, name[search_filter]]]
variable[results] assign[=] call[name[self].adq.search, parameter[name[base_dn], name[search_filter], name[attributes]]]
for taget[name[search_result]] in starred[name[results]] begin[:]
variable[adc] assign[=] call[name[self]._object_factory, parameter[name[search_result]]]
call[name[ad_computers].append, parameter[name[adc]]]
return[name[ad_computers]] | keyword[def] identifier[computers] ( identifier[self] , identifier[base_dn] , identifier[samaccountnames] =(), identifier[attributes] =()):
literal[string]
identifier[ad_computers] =[]
identifier[search_filter] = literal[string]
keyword[if] keyword[not] identifier[samaccountnames] :
identifier[search_filter] = identifier[search_filter] . identifier[format] ( literal[string] )
keyword[else] :
keyword[if] identifier[len] ( identifier[samaccountnames] )== literal[int] :
identifier[computer_names] = literal[string] . identifier[format] ( identifier[samaccountnames] [ literal[int] ])
keyword[else] :
identifier[computer_names] = literal[string] . identifier[format] ( literal[string] . identifier[join] ([ literal[string] . identifier[format] ( identifier[computer] ) keyword[for] identifier[computer]
keyword[in] identifier[samaccountnames] ]))
identifier[search_filter] = identifier[search_filter] . identifier[format] ( identifier[computer_names] )
identifier[logging] . identifier[debug] ( literal[string] , identifier[self] . identifier[__class__] . identifier[__name__] , identifier[search_filter] )
identifier[results] = identifier[self] . identifier[adq] . identifier[search] ( identifier[base_dn] , identifier[search_filter] , identifier[attributes] )
keyword[for] identifier[search_result] keyword[in] identifier[results] :
identifier[adc] = identifier[self] . identifier[_object_factory] ( identifier[search_result] )
identifier[ad_computers] . identifier[append] ( identifier[adc] )
keyword[return] identifier[ad_computers] | def computers(self, base_dn, samaccountnames=(), attributes=()):
"""Gathers a list of ADComputer objects
:param str base_dn: The base DN to search within
:param list samaccountnames: A list of computer names for which objects will be
created, defaults to all computers if unspecified
:param list attributes: Object attributes to populate, defaults to all
:return: A list of populated ADComputer objects
:rtype: list
"""
ad_computers = []
search_filter = '(&(objectClass=computer){0})'
# If no samaccountnames specified, filter will pull all computer objects under
# base_dn
if not samaccountnames:
search_filter = search_filter.format('(sAMAccountName=*)') # depends on [control=['if'], data=[]]
else:
if len(samaccountnames) == 1:
computer_names = '(sAMAccountName={0})'.format(samaccountnames[0]) # depends on [control=['if'], data=[]]
else:
computer_names = '(|{0})'.format(''.join(['(sAMAccountName={0})'.format(computer) for computer in samaccountnames]))
search_filter = search_filter.format(computer_names)
logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)
results = self.adq.search(base_dn, search_filter, attributes)
for search_result in results:
adc = self._object_factory(search_result)
ad_computers.append(adc) # depends on [control=['for'], data=['search_result']]
return ad_computers |
def remove_multizone(self, group_uuid):
""" Stop managing a group """
group_uuid = str(group_uuid)
group = self._groups.pop(group_uuid, None)
# Inform all group members that they are no longer members
if group is not None:
group['listener']._mz.reset_members() # noqa: E501 pylint: disable=protected-access
for member in self._casts.values():
member['groups'].discard(group_uuid) | def function[remove_multizone, parameter[self, group_uuid]]:
constant[ Stop managing a group ]
variable[group_uuid] assign[=] call[name[str], parameter[name[group_uuid]]]
variable[group] assign[=] call[name[self]._groups.pop, parameter[name[group_uuid], constant[None]]]
if compare[name[group] is_not constant[None]] begin[:]
call[call[name[group]][constant[listener]]._mz.reset_members, parameter[]]
for taget[name[member]] in starred[call[name[self]._casts.values, parameter[]]] begin[:]
call[call[name[member]][constant[groups]].discard, parameter[name[group_uuid]]] | keyword[def] identifier[remove_multizone] ( identifier[self] , identifier[group_uuid] ):
literal[string]
identifier[group_uuid] = identifier[str] ( identifier[group_uuid] )
identifier[group] = identifier[self] . identifier[_groups] . identifier[pop] ( identifier[group_uuid] , keyword[None] )
keyword[if] identifier[group] keyword[is] keyword[not] keyword[None] :
identifier[group] [ literal[string] ]. identifier[_mz] . identifier[reset_members] ()
keyword[for] identifier[member] keyword[in] identifier[self] . identifier[_casts] . identifier[values] ():
identifier[member] [ literal[string] ]. identifier[discard] ( identifier[group_uuid] ) | def remove_multizone(self, group_uuid):
""" Stop managing a group """
group_uuid = str(group_uuid)
group = self._groups.pop(group_uuid, None)
# Inform all group members that they are no longer members
if group is not None:
group['listener']._mz.reset_members() # noqa: E501 pylint: disable=protected-access # depends on [control=['if'], data=['group']]
for member in self._casts.values():
member['groups'].discard(group_uuid) # depends on [control=['for'], data=['member']] |
def fixed_point_quantize(x, sign=True, n=8, delta=2**-4, quantize=True, ste_fine_grained=True, outputs=None):
r"""Fixed Point Quantize
Args:
x (Variable): An input variable.
sign (bool): Indicate the signed number or the unsigned number. Default is true.
n (int): Bit width used. Note that `sign` consumes one bit. :math:`n-1` is used for number representation in `signed` case.
delta (float): Step size.
quantize (bool): If true, quantize input, otherwise not.
ste_fine_grained (bool): If true, STE is not 1.
Returns:
~nnabla.Variable: N-D array.
See Also:
``nnabla.function_bases.fixed_point_quantize``.
In the forward pass,
.. math::
\begin{equation}
q_i= \left\{
\begin{array}{ll}
max & if \ \ \ x_i > max \\
sign(x_i) \times floor(|x_i| \delta^{-1} + 2^{-1}) \times \delta & if \ \ min \le x_i \le max \\
min & if \ \ x_i < min \\
\end{array} \right.,
\end{equation}
where :math:`\delta` is the step size,
:math:`(min, max) :=(- (2^{n-1} - 1)\delta, (2^{n-1} - 1)\delta)` if :math:`sign` is true,
:math:`(min, max) := (0, (2^n - 1) \delta)` otherwise, and
:math:`n` is the total bit-width used.
In the backward pass when using `ste_fine_grained` as false,
.. math::
\begin{equation}
\frac{\partial q_i}{\partial x_i} = 1.
\end{equation}
In the backward pass when using `ste_fine_grained` as true,
.. math::
\begin{equation}
\frac{\partial q_i}{\partial x_i}= \left\{
\begin{array}{ll}
0 & if \ \ \ x_i > max \\
1 & if \ \ min \le x_i \le max \\
0 & if \ \ x_i < min \\
\end{array} \right..
\end{equation}
.. note::
Quantized values are stored as floating point number, since this function is for simulation purposes.
"""
from .function_bases import fixed_point_quantize as fixed_point_quantize_base
if not quantize:
return x
return fixed_point_quantize_base(x, sign, n, delta, ste_fine_grained, outputs=outputs) | def function[fixed_point_quantize, parameter[x, sign, n, delta, quantize, ste_fine_grained, outputs]]:
constant[Fixed Point Quantize
Args:
x (Variable): An input variable.
sign (bool): Indicate the signed number or the unsigned number. Default is true.
n (int): Bit width used. Note that `sign` consumes one bit. :math:`n-1` is used for number representation in `signed` case.
delta (float): Step size.
quantize (bool): If true, quantize input, otherwise not.
ste_fine_grained (bool): If true, STE is not 1.
Returns:
~nnabla.Variable: N-D array.
See Also:
``nnabla.function_bases.fixed_point_quantize``.
In the forward pass,
.. math::
\begin{equation}
q_i= \left\{
\begin{array}{ll}
max & if \ \ \ x_i > max \\
sign(x_i) \times floor(|x_i| \delta^{-1} + 2^{-1}) \times \delta & if \ \ min \le x_i \le max \\
min & if \ \ x_i < min \\
\end{array} \right.,
\end{equation}
where :math:`\delta` is the step size,
:math:`(min, max) :=(- (2^{n-1} - 1)\delta, (2^{n-1} - 1)\delta)` if :math:`sign` is true,
:math:`(min, max) := (0, (2^n - 1) \delta)` otherwise, and
:math:`n` is the total bit-width used.
In the backward pass when using `ste_fine_grained` as false,
.. math::
\begin{equation}
\frac{\partial q_i}{\partial x_i} = 1.
\end{equation}
In the backward pass when using `ste_fine_grained` as true,
.. math::
\begin{equation}
\frac{\partial q_i}{\partial x_i}= \left\{
\begin{array}{ll}
0 & if \ \ \ x_i > max \\
1 & if \ \ min \le x_i \le max \\
0 & if \ \ x_i < min \\
\end{array} \right..
\end{equation}
.. note::
Quantized values are stored as floating point number, since this function is for simulation purposes.
]
from relative_module[function_bases] import module[fixed_point_quantize]
if <ast.UnaryOp object at 0x7da18f812800> begin[:]
return[name[x]]
return[call[name[fixed_point_quantize_base], parameter[name[x], name[sign], name[n], name[delta], name[ste_fine_grained]]]] | keyword[def] identifier[fixed_point_quantize] ( identifier[x] , identifier[sign] = keyword[True] , identifier[n] = literal[int] , identifier[delta] = literal[int] **- literal[int] , identifier[quantize] = keyword[True] , identifier[ste_fine_grained] = keyword[True] , identifier[outputs] = keyword[None] ):
literal[string]
keyword[from] . identifier[function_bases] keyword[import] identifier[fixed_point_quantize] keyword[as] identifier[fixed_point_quantize_base]
keyword[if] keyword[not] identifier[quantize] :
keyword[return] identifier[x]
keyword[return] identifier[fixed_point_quantize_base] ( identifier[x] , identifier[sign] , identifier[n] , identifier[delta] , identifier[ste_fine_grained] , identifier[outputs] = identifier[outputs] ) | def fixed_point_quantize(x, sign=True, n=8, delta=2 ** (-4), quantize=True, ste_fine_grained=True, outputs=None):
"""Fixed Point Quantize
Args:
x (Variable): An input variable.
sign (bool): Indicate the signed number or the unsigned number. Default is true.
n (int): Bit width used. Note that `sign` consumes one bit. :math:`n-1` is used for number representation in `signed` case.
delta (float): Step size.
quantize (bool): If true, quantize input, otherwise not.
ste_fine_grained (bool): If true, STE is not 1.
Returns:
~nnabla.Variable: N-D array.
See Also:
``nnabla.function_bases.fixed_point_quantize``.
In the forward pass,
.. math::
\\begin{equation}
q_i= \\left\\{
\\begin{array}{ll}
max & if \\ \\ \\ x_i > max \\\\
sign(x_i) \\times floor(|x_i| \\delta^{-1} + 2^{-1}) \\times \\delta & if \\ \\ min \\le x_i \\le max \\\\
min & if \\ \\ x_i < min \\\\
\\end{array} \\right.,
\\end{equation}
where :math:`\\delta` is the step size,
:math:`(min, max) :=(- (2^{n-1} - 1)\\delta, (2^{n-1} - 1)\\delta)` if :math:`sign` is true,
:math:`(min, max) := (0, (2^n - 1) \\delta)` otherwise, and
:math:`n` is the total bit-width used.
In the backward pass when using `ste_fine_grained` as false,
.. math::
\\begin{equation}
\\frac{\\partial q_i}{\\partial x_i} = 1.
\\end{equation}
In the backward pass when using `ste_fine_grained` as true,
.. math::
\\begin{equation}
\\frac{\\partial q_i}{\\partial x_i}= \\left\\{
\\begin{array}{ll}
0 & if \\ \\ \\ x_i > max \\\\
1 & if \\ \\ min \\le x_i \\le max \\\\
0 & if \\ \\ x_i < min \\\\
\\end{array} \\right..
\\end{equation}
.. note::
Quantized values are stored as floating point number, since this function is for simulation purposes.
"""
from .function_bases import fixed_point_quantize as fixed_point_quantize_base
if not quantize:
return x # depends on [control=['if'], data=[]]
return fixed_point_quantize_base(x, sign, n, delta, ste_fine_grained, outputs=outputs) |
def rm_blanks(self):
"""
Get rid of parameters that has no value.
"""
_blanks = [k for k in self._dict.keys() if not self._dict[k]]
for key in _blanks:
del self._dict[key] | def function[rm_blanks, parameter[self]]:
constant[
Get rid of parameters that has no value.
]
variable[_blanks] assign[=] <ast.ListComp object at 0x7da20c6c6e60>
for taget[name[key]] in starred[name[_blanks]] begin[:]
<ast.Delete object at 0x7da20c6c50c0> | keyword[def] identifier[rm_blanks] ( identifier[self] ):
literal[string]
identifier[_blanks] =[ identifier[k] keyword[for] identifier[k] keyword[in] identifier[self] . identifier[_dict] . identifier[keys] () keyword[if] keyword[not] identifier[self] . identifier[_dict] [ identifier[k] ]]
keyword[for] identifier[key] keyword[in] identifier[_blanks] :
keyword[del] identifier[self] . identifier[_dict] [ identifier[key] ] | def rm_blanks(self):
"""
Get rid of parameters that has no value.
"""
_blanks = [k for k in self._dict.keys() if not self._dict[k]]
for key in _blanks:
del self._dict[key] # depends on [control=['for'], data=['key']] |
def _rename_full_name(self, full_name, other_trajectory, used_runs=None, new_run_idx=None):
"""Renames a full name based on the wildcards and a particular run"""
split_name = full_name.split('.')
for idx, name in enumerate(split_name):
if name in other_trajectory._reversed_wildcards:
run_indices, wildcards = other_trajectory._reversed_wildcards[name]
if new_run_idx is None:
# We can safely take the first index of the index list that matches
run_idx = None
for run_jdx in run_indices:
if run_jdx in used_runs:
run_idx = used_runs[run_jdx]
break
elif run_jdx == -1:
run_idx = -1
break
if run_idx is None:
raise RuntimeError('You shall not pass!')
else:
run_idx = new_run_idx
new_name = self.f_wildcard(wildcards[0], run_idx)
split_name[idx] = new_name
full_name = '.'.join(split_name)
return full_name | def function[_rename_full_name, parameter[self, full_name, other_trajectory, used_runs, new_run_idx]]:
constant[Renames a full name based on the wildcards and a particular run]
variable[split_name] assign[=] call[name[full_name].split, parameter[constant[.]]]
for taget[tuple[[<ast.Name object at 0x7da1b032e8c0>, <ast.Name object at 0x7da1b032e980>]]] in starred[call[name[enumerate], parameter[name[split_name]]]] begin[:]
if compare[name[name] in name[other_trajectory]._reversed_wildcards] begin[:]
<ast.Tuple object at 0x7da1b032c190> assign[=] call[name[other_trajectory]._reversed_wildcards][name[name]]
if compare[name[new_run_idx] is constant[None]] begin[:]
variable[run_idx] assign[=] constant[None]
for taget[name[run_jdx]] in starred[name[run_indices]] begin[:]
if compare[name[run_jdx] in name[used_runs]] begin[:]
variable[run_idx] assign[=] call[name[used_runs]][name[run_jdx]]
break
if compare[name[run_idx] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b032ce80>
variable[new_name] assign[=] call[name[self].f_wildcard, parameter[call[name[wildcards]][constant[0]], name[run_idx]]]
call[name[split_name]][name[idx]] assign[=] name[new_name]
variable[full_name] assign[=] call[constant[.].join, parameter[name[split_name]]]
return[name[full_name]] | keyword[def] identifier[_rename_full_name] ( identifier[self] , identifier[full_name] , identifier[other_trajectory] , identifier[used_runs] = keyword[None] , identifier[new_run_idx] = keyword[None] ):
literal[string]
identifier[split_name] = identifier[full_name] . identifier[split] ( literal[string] )
keyword[for] identifier[idx] , identifier[name] keyword[in] identifier[enumerate] ( identifier[split_name] ):
keyword[if] identifier[name] keyword[in] identifier[other_trajectory] . identifier[_reversed_wildcards] :
identifier[run_indices] , identifier[wildcards] = identifier[other_trajectory] . identifier[_reversed_wildcards] [ identifier[name] ]
keyword[if] identifier[new_run_idx] keyword[is] keyword[None] :
identifier[run_idx] = keyword[None]
keyword[for] identifier[run_jdx] keyword[in] identifier[run_indices] :
keyword[if] identifier[run_jdx] keyword[in] identifier[used_runs] :
identifier[run_idx] = identifier[used_runs] [ identifier[run_jdx] ]
keyword[break]
keyword[elif] identifier[run_jdx] ==- literal[int] :
identifier[run_idx] =- literal[int]
keyword[break]
keyword[if] identifier[run_idx] keyword[is] keyword[None] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[else] :
identifier[run_idx] = identifier[new_run_idx]
identifier[new_name] = identifier[self] . identifier[f_wildcard] ( identifier[wildcards] [ literal[int] ], identifier[run_idx] )
identifier[split_name] [ identifier[idx] ]= identifier[new_name]
identifier[full_name] = literal[string] . identifier[join] ( identifier[split_name] )
keyword[return] identifier[full_name] | def _rename_full_name(self, full_name, other_trajectory, used_runs=None, new_run_idx=None):
"""Renames a full name based on the wildcards and a particular run"""
split_name = full_name.split('.')
for (idx, name) in enumerate(split_name):
if name in other_trajectory._reversed_wildcards:
(run_indices, wildcards) = other_trajectory._reversed_wildcards[name]
if new_run_idx is None:
# We can safely take the first index of the index list that matches
run_idx = None
for run_jdx in run_indices:
if run_jdx in used_runs:
run_idx = used_runs[run_jdx]
break # depends on [control=['if'], data=['run_jdx', 'used_runs']]
elif run_jdx == -1:
run_idx = -1
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['run_jdx']]
if run_idx is None:
raise RuntimeError('You shall not pass!') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
run_idx = new_run_idx
new_name = self.f_wildcard(wildcards[0], run_idx)
split_name[idx] = new_name # depends on [control=['if'], data=['name']] # depends on [control=['for'], data=[]]
full_name = '.'.join(split_name)
return full_name |
def delete(self, **kwargs):
"""
Used for deleting objects from the facebook graph. Just pass the id of the object to be
deleted. But in case of like, have to pass the cat ("likes") and object id as a like has no id
itself in the facebook graph
"""
if 'cat' not in kwargs.keys():
kwargs['cat']=''
cat=kwargs['cat']
del kwargs['cat']
res=request.publish_cat1("DELETE", self.con, self.token, cat, kwargs)
return res | def function[delete, parameter[self]]:
constant[
Used for deleting objects from the facebook graph. Just pass the id of the object to be
deleted. But in case of like, have to pass the cat ("likes") and object id as a like has no id
itself in the facebook graph
]
if compare[constant[cat] <ast.NotIn object at 0x7da2590d7190> call[name[kwargs].keys, parameter[]]] begin[:]
call[name[kwargs]][constant[cat]] assign[=] constant[]
variable[cat] assign[=] call[name[kwargs]][constant[cat]]
<ast.Delete object at 0x7da2041d98d0>
variable[res] assign[=] call[name[request].publish_cat1, parameter[constant[DELETE], name[self].con, name[self].token, name[cat], name[kwargs]]]
return[name[res]] | keyword[def] identifier[delete] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] . identifier[keys] ():
identifier[kwargs] [ literal[string] ]= literal[string]
identifier[cat] = identifier[kwargs] [ literal[string] ]
keyword[del] identifier[kwargs] [ literal[string] ]
identifier[res] = identifier[request] . identifier[publish_cat1] ( literal[string] , identifier[self] . identifier[con] , identifier[self] . identifier[token] , identifier[cat] , identifier[kwargs] )
keyword[return] identifier[res] | def delete(self, **kwargs):
"""
Used for deleting objects from the facebook graph. Just pass the id of the object to be
deleted. But in case of like, have to pass the cat ("likes") and object id as a like has no id
itself in the facebook graph
"""
if 'cat' not in kwargs.keys():
kwargs['cat'] = '' # depends on [control=['if'], data=[]]
cat = kwargs['cat']
del kwargs['cat']
res = request.publish_cat1('DELETE', self.con, self.token, cat, kwargs)
return res |
def add_isoquant_data(peptides, quantpeptides, quantacc, quantfields):
    """Runs through a peptide table and adds quant data from ANOTHER peptide
    table that contains that data.

    Thin generator wrapper: all work is delegated to the generic
    ``base_add_isoquant_data`` together with the peptide-table header.
    """
    # ``yield from`` delegates the whole generator instead of re-yielding
    # every item through an explicit loop.
    yield from base_add_isoquant_data(peptides, quantpeptides,
                                      peptabledata.HEADER_PEPTIDE,
                                      quantacc, quantfields)
constant[Runs through a peptide table and adds quant data from ANOTHER peptide
table that contains that data.]
for taget[name[peptide]] in starred[call[name[base_add_isoquant_data], parameter[name[peptides], name[quantpeptides], name[peptabledata].HEADER_PEPTIDE, name[quantacc], name[quantfields]]]] begin[:]
<ast.Yield object at 0x7da1b23344c0> | keyword[def] identifier[add_isoquant_data] ( identifier[peptides] , identifier[quantpeptides] , identifier[quantacc] , identifier[quantfields] ):
literal[string]
keyword[for] identifier[peptide] keyword[in] identifier[base_add_isoquant_data] ( identifier[peptides] , identifier[quantpeptides] ,
identifier[peptabledata] . identifier[HEADER_PEPTIDE] ,
identifier[quantacc] , identifier[quantfields] ):
keyword[yield] identifier[peptide] | def add_isoquant_data(peptides, quantpeptides, quantacc, quantfields):
"""Runs through a peptide table and adds quant data from ANOTHER peptide
table that contains that data."""
for peptide in base_add_isoquant_data(peptides, quantpeptides, peptabledata.HEADER_PEPTIDE, quantacc, quantfields):
yield peptide # depends on [control=['for'], data=['peptide']] |
def takeTag(self, tag):
    """
    Removes the inputed tag from the system.
    :param tag | <str>
    :return <XMultiTagItem> || None
    """
    # NOTE(review): the last row is intentionally never inspected
    # (range over count() - 1) — presumably the trailing entry is not a
    # removable tag item; confirm against the widget implementation.
    for row in range(self.count() - 1):
        found = self.item(row)
        if not found or found.text() != tag:
            continue
        self.takeItem(row)
        if not self.signalsBlocked():
            self.tagRemoved.emit(tag)
        return found
    return None
constant[
Removes the inputed tag from the system.
:param tag | <str>
:return <XMultiTagItem> || None
]
for taget[name[row]] in starred[call[name[range], parameter[binary_operation[call[name[self].count, parameter[]] - constant[1]]]]] begin[:]
variable[item] assign[=] call[name[self].item, parameter[name[row]]]
if <ast.BoolOp object at 0x7da1b2425600> begin[:]
call[name[self].takeItem, parameter[name[row]]]
if <ast.UnaryOp object at 0x7da1b2424b20> begin[:]
call[name[self].tagRemoved.emit, parameter[name[tag]]]
return[name[item]]
return[constant[None]] | keyword[def] identifier[takeTag] ( identifier[self] , identifier[tag] ):
literal[string]
keyword[for] identifier[row] keyword[in] identifier[range] ( identifier[self] . identifier[count] ()- literal[int] ):
identifier[item] = identifier[self] . identifier[item] ( identifier[row] )
keyword[if] ( identifier[item] keyword[and] identifier[item] . identifier[text] ()== identifier[tag] ):
identifier[self] . identifier[takeItem] ( identifier[row] )
keyword[if] ( keyword[not] identifier[self] . identifier[signalsBlocked] ()):
identifier[self] . identifier[tagRemoved] . identifier[emit] ( identifier[tag] )
keyword[return] identifier[item]
keyword[return] keyword[None] | def takeTag(self, tag):
"""
Removes the inputed tag from the system.
:param tag | <str>
:return <XMultiTagItem> || None
"""
for row in range(self.count() - 1):
item = self.item(row)
if item and item.text() == tag:
self.takeItem(row)
if not self.signalsBlocked():
self.tagRemoved.emit(tag) # depends on [control=['if'], data=[]]
return item # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']]
return None |
def go_to_previous_tab(self):
    """
    Focus the previous tab.
    """
    # Python's modulo always yields a non-negative result, so stepping
    # back from index 0 wraps around to the last tab automatically.
    self.active_tab_index = (self.active_tab_index - 1) % len(self.tab_pages)
constant[
Focus the previous tab.
]
name[self].active_tab_index assign[=] binary_operation[binary_operation[binary_operation[name[self].active_tab_index - constant[1]] + call[name[len], parameter[name[self].tab_pages]]] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[self].tab_pages]]] | keyword[def] identifier[go_to_previous_tab] ( identifier[self] ):
literal[string]
identifier[self] . identifier[active_tab_index] =( identifier[self] . identifier[active_tab_index] - literal[int] +
identifier[len] ( identifier[self] . identifier[tab_pages] ))% identifier[len] ( identifier[self] . identifier[tab_pages] ) | def go_to_previous_tab(self):
"""
Focus the previous tab.
"""
self.active_tab_index = (self.active_tab_index - 1 + len(self.tab_pages)) % len(self.tab_pages) |
def startup(name):
    '''
    Start Traffic Server on the local node.
    .. code-block:: yaml
        startup_ats:
          trafficserver.startup
    '''
    ret = {
        'name': name,
        'changes': {},
        'result': None,
        'comment': 'Starting up local node',
    }
    # Dry-run (test) mode: report the intended action without touching
    # the service; ``result`` stays None per Salt conventions.
    if __opts__['test']:
        return ret
    __salt__['trafficserver.startup']()
    ret['result'] = True
    return ret
constant[
Start Traffic Server on the local node.
.. code-block:: yaml
startup_ats:
trafficserver.startup
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da20c7ca230>, <ast.Constant object at 0x7da20c7c9b40>, <ast.Constant object at 0x7da20c7cb2e0>, <ast.Constant object at 0x7da20c7c8c70>], [<ast.Name object at 0x7da20c7c8700>, <ast.Dict object at 0x7da20c7c80a0>, <ast.Constant object at 0x7da20c7cacb0>, <ast.Constant object at 0x7da20c7c8d90>]]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[comment]] assign[=] constant[Starting up local node]
return[name[ret]]
call[call[name[__salt__]][constant[trafficserver.startup]], parameter[]]
call[name[ret]][constant[result]] assign[=] constant[True]
call[name[ret]][constant[comment]] assign[=] constant[Starting up local node]
return[name[ret]] | keyword[def] identifier[startup] ( identifier[name] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] ,
literal[string] :{},
literal[string] : keyword[None] ,
literal[string] : literal[string] }
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
identifier[__salt__] [ literal[string] ]()
identifier[ret] [ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret] | def startup(name):
"""
Start Traffic Server on the local node.
.. code-block:: yaml
startup_ats:
trafficserver.startup
"""
ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''}
if __opts__['test']:
ret['comment'] = 'Starting up local node'
return ret # depends on [control=['if'], data=[]]
__salt__['trafficserver.startup']()
ret['result'] = True
ret['comment'] = 'Starting up local node'
return ret |
def otsu(img, bins=64):
    r"""
    Otsu's method to find the optimal threshold separating an image into fore- and background.
    This rather expensive method iterates over a number of thresholds to separate the
    images histogram into two parts with a minimal intra-class variance.
    An increase in the number of bins increases the algorithms specificity at the cost of
    slowing it down.
    Parameters
    ----------
    img : array_like
        The image for which to determine the threshold.
    bins : integer
        The number of histogram bins.
    Returns
    -------
    otsu : float
        The otsu threshold to separate the input image into fore- and background.
    Raises
    ------
    AttributeError
        If fewer than two bins are requested.
    Notes
    -----
    A constant image (``img.max() == img.min()``) has no meaningful
    threshold; its single intensity value is returned instead of letting
    ``numpy.arange`` fail on a zero step length.
    """
    # cast bins parameter to int
    bins = int(bins)
    # cast img parameter to a numpy array
    img = numpy.asarray(img)
    # check supplied parameters
    if bins <= 1:
        raise AttributeError('At least a number two bins have to be provided.')
    lowest = img.min()
    highest = img.max()
    # degenerate case: all pixels share one value -> the step length would
    # be zero and arange would raise; return that single value directly
    if lowest == highest:
        return lowest
    # determine initial threshold and threshold step-length
    steplength = (highest - lowest) / float(bins)
    initial_threshold = lowest + steplength
    # initialize best value variables
    best_bcv = 0
    best_threshold = initial_threshold
    # iterate over the candidate thresholds, maximising the between-class
    # variance (equivalent to minimising the intra-class variance)
    for threshold in numpy.arange(initial_threshold, highest, steplength):
        mask_fg = (img >= threshold)
        mask_bg = (img < threshold)
        wfg = numpy.count_nonzero(mask_fg)
        wbg = numpy.count_nonzero(mask_bg)
        if 0 == wfg or 0 == wbg:
            continue
        mfg = img[mask_fg].mean()
        mbg = img[mask_bg].mean()
        # between-class variance; ``**`` replaces the needless math.pow call
        bcv = wfg * wbg * (mbg - mfg) ** 2
        if bcv > best_bcv:
            best_bcv = bcv
            best_threshold = threshold
    return best_threshold
constant[
Otsu's method to find the optimal threshold separating an image into fore- and background.
This rather expensive method iterates over a number of thresholds to separate the
images histogram into two parts with a minimal intra-class variance.
An increase in the number of bins increases the algorithms specificity at the cost of
slowing it down.
Parameters
----------
img : array_like
The image for which to determine the threshold.
bins : integer
The number of histogram bins.
Returns
-------
otsu : float
The otsu threshold to separate the input image into fore- and background.
]
variable[bins] assign[=] call[name[int], parameter[name[bins]]]
variable[img] assign[=] call[name[numpy].asarray, parameter[name[img]]]
if compare[name[bins] less_or_equal[<=] constant[1]] begin[:]
<ast.Raise object at 0x7da1b113fb50>
variable[steplength] assign[=] binary_operation[binary_operation[call[name[img].max, parameter[]] - call[name[img].min, parameter[]]] / call[name[float], parameter[name[bins]]]]
variable[initial_threshold] assign[=] binary_operation[call[name[img].min, parameter[]] + name[steplength]]
variable[best_bcv] assign[=] constant[0]
variable[best_threshold] assign[=] name[initial_threshold]
for taget[name[threshold]] in starred[call[name[numpy].arange, parameter[name[initial_threshold], call[name[img].max, parameter[]], name[steplength]]]] begin[:]
variable[mask_fg] assign[=] compare[name[img] greater_or_equal[>=] name[threshold]]
variable[mask_bg] assign[=] compare[name[img] less[<] name[threshold]]
variable[wfg] assign[=] call[name[numpy].count_nonzero, parameter[name[mask_fg]]]
variable[wbg] assign[=] call[name[numpy].count_nonzero, parameter[name[mask_bg]]]
if <ast.BoolOp object at 0x7da1b113f670> begin[:]
continue
variable[mfg] assign[=] call[call[name[img]][name[mask_fg]].mean, parameter[]]
variable[mbg] assign[=] call[call[name[img]][name[mask_bg]].mean, parameter[]]
variable[bcv] assign[=] binary_operation[binary_operation[name[wfg] * name[wbg]] * call[name[math].pow, parameter[binary_operation[name[mbg] - name[mfg]], constant[2]]]]
if compare[name[bcv] greater[>] name[best_bcv]] begin[:]
variable[best_bcv] assign[=] name[bcv]
variable[best_threshold] assign[=] name[threshold]
return[name[best_threshold]] | keyword[def] identifier[otsu] ( identifier[img] , identifier[bins] = literal[int] ):
literal[string]
identifier[bins] = identifier[int] ( identifier[bins] )
identifier[img] = identifier[numpy] . identifier[asarray] ( identifier[img] )
keyword[if] identifier[bins] <= literal[int] :
keyword[raise] identifier[AttributeError] ( literal[string] )
identifier[steplength] =( identifier[img] . identifier[max] ()- identifier[img] . identifier[min] ())/ identifier[float] ( identifier[bins] )
identifier[initial_threshold] = identifier[img] . identifier[min] ()+ identifier[steplength]
identifier[best_bcv] = literal[int]
identifier[best_threshold] = identifier[initial_threshold]
keyword[for] identifier[threshold] keyword[in] identifier[numpy] . identifier[arange] ( identifier[initial_threshold] , identifier[img] . identifier[max] (), identifier[steplength] ):
identifier[mask_fg] =( identifier[img] >= identifier[threshold] )
identifier[mask_bg] =( identifier[img] < identifier[threshold] )
identifier[wfg] = identifier[numpy] . identifier[count_nonzero] ( identifier[mask_fg] )
identifier[wbg] = identifier[numpy] . identifier[count_nonzero] ( identifier[mask_bg] )
keyword[if] literal[int] == identifier[wfg] keyword[or] literal[int] == identifier[wbg] : keyword[continue]
identifier[mfg] = identifier[img] [ identifier[mask_fg] ]. identifier[mean] ()
identifier[mbg] = identifier[img] [ identifier[mask_bg] ]. identifier[mean] ()
identifier[bcv] = identifier[wfg] * identifier[wbg] * identifier[math] . identifier[pow] ( identifier[mbg] - identifier[mfg] , literal[int] )
keyword[if] identifier[bcv] > identifier[best_bcv] :
identifier[best_bcv] = identifier[bcv]
identifier[best_threshold] = identifier[threshold]
keyword[return] identifier[best_threshold] | def otsu(img, bins=64):
"""
Otsu's method to find the optimal threshold separating an image into fore- and background.
This rather expensive method iterates over a number of thresholds to separate the
images histogram into two parts with a minimal intra-class variance.
An increase in the number of bins increases the algorithms specificity at the cost of
slowing it down.
Parameters
----------
img : array_like
The image for which to determine the threshold.
bins : integer
The number of histogram bins.
Returns
-------
otsu : float
The otsu threshold to separate the input image into fore- and background.
"""
# cast bins parameter to int
bins = int(bins)
# cast img parameter to scipy arrax
img = numpy.asarray(img)
# check supplied parameters
if bins <= 1:
raise AttributeError('At least a number two bins have to be provided.') # depends on [control=['if'], data=[]]
# determine initial threshold and threshold step-length
steplength = (img.max() - img.min()) / float(bins)
initial_threshold = img.min() + steplength
# initialize best value variables
best_bcv = 0
best_threshold = initial_threshold
# iterate over the thresholds and find highest between class variance
for threshold in numpy.arange(initial_threshold, img.max(), steplength):
mask_fg = img >= threshold
mask_bg = img < threshold
wfg = numpy.count_nonzero(mask_fg)
wbg = numpy.count_nonzero(mask_bg)
if 0 == wfg or 0 == wbg:
continue # depends on [control=['if'], data=[]]
mfg = img[mask_fg].mean()
mbg = img[mask_bg].mean()
bcv = wfg * wbg * math.pow(mbg - mfg, 2)
if bcv > best_bcv:
best_bcv = bcv
best_threshold = threshold # depends on [control=['if'], data=['bcv', 'best_bcv']] # depends on [control=['for'], data=['threshold']]
return best_threshold |
def get_zone_info(cls, area_str, match_type='EXACT', result_type='LIST'):
    """Look up Chinese administrative zone codes for a region name.

    :param:
        * area_str: (string) province/city/district name to search for,
          e.g. 北京市
        * match_type: (string) 'EXACT' (default) for an exact match on the
          area name, or 'FUZZY' for a substring (SQL LIKE) match
        * result_type: (string) 'LIST' (default) returns a list of up to 20
          ``(zone_code, area_name)`` tuples; 'SINGLE_STR' returns only the
          zone code of the first hit, or '' when nothing matched
    :returns:
        * a list of tuples or a single zone-code string, depending on
          ``result_type``
    """
    hits = []
    if match_type == 'EXACT':
        hits = sqlite_query('fish_data.sqlite',
                            'select zone, areanote from cn_idcard where areanote = :area',
                            {"area": area_str})
    if match_type == 'FUZZY':
        hits = sqlite_query('fish_data.sqlite',
                            'select zone, areanote from cn_idcard where areanote like :area',
                            {"area": '%' + area_str + '%'})
    if result_type == 'LIST':
        # cap the result set at the first 20 entries
        return hits[:20]
    if result_type == 'SINGLE_STR':
        return hits[0][0] if hits else ''
constant[
输入包含省份、城市、地区信息的内容,返回地区编号;
:param:
* area_str: (string) 要查询的区域,省份、城市、地区信息,比如 北京市
* match_type: (string) 查询匹配模式,默认值 'EXACT',表示精确匹配,可选 'FUZZY',表示模糊查询
* result_type: (string) 返回结果数量类型,默认值 'LIST',表示返回列表,可选 'SINGLE_STR',返回结果的第一个地区编号字符串
:returns:
* 返回类型 根据 resule_type 决定返回类型是列表或者单一字符串,列表中包含元组 比如:[('110000', '北京市')],元组中的第一个元素是地区码,
第二个元素是对应的区域内容 结果最多返回 20 个。
举例如下::
from fishbase.fish_data import *
print('--- fish_data get_zone_info demo ---')
result = IdCard.get_zone_info(area_str='北京市')
print(result)
# 模糊查询
result = IdCard.get_zone_info(area_str='西安市', match_type='FUZZY')
print(result)
result0 = []
for i in result:
result0.append(i[0])
print('---西安市---')
print(len(result0))
print(result0)
# 模糊查询, 结果返回设定 single_str
result = IdCard.get_zone_info(area_str='西安市', match_type='FUZZY', result_type='SINGLE_STR')
print(result)
# 模糊查询, 结果返回设定 single_str,西安市 和 西安 的差别
result = IdCard.get_zone_info(area_str='西安', match_type='FUZZY', result_type='SINGLE_STR')
print(result)
print('---')
输出结果::
--- fish_data get_zone_info demo ---
[('110000', '北京市')]
130522198407316471 True
---西安市---
11
['610100', '610101', '610102', '610103', '610104', '610111', '610112', '610113', '610114', '610115',
'610116']
610100
220403
---
]
variable[values] assign[=] list[[]]
if compare[name[match_type] equal[==] constant[EXACT]] begin[:]
variable[values] assign[=] call[name[sqlite_query], parameter[constant[fish_data.sqlite], constant[select zone, areanote from cn_idcard where areanote = :area], dictionary[[<ast.Constant object at 0x7da1b08e57b0>], [<ast.Name object at 0x7da1b08e64d0>]]]]
if compare[name[match_type] equal[==] constant[FUZZY]] begin[:]
variable[values] assign[=] call[name[sqlite_query], parameter[constant[fish_data.sqlite], constant[select zone, areanote from cn_idcard where areanote like :area], dictionary[[<ast.Constant object at 0x7da1b08e4220>], [<ast.BinOp object at 0x7da1b08e4d30>]]]]
if compare[name[result_type] equal[==] constant[LIST]] begin[:]
if compare[call[name[len], parameter[name[values]]] greater[>] constant[20]] begin[:]
variable[values] assign[=] call[name[values]][<ast.Slice object at 0x7da1b08e5d80>]
return[name[values]]
if compare[name[result_type] equal[==] constant[SINGLE_STR]] begin[:]
if compare[call[name[len], parameter[name[values]]] equal[==] constant[0]] begin[:]
return[constant[]]
if compare[call[name[len], parameter[name[values]]] greater[>] constant[0]] begin[:]
variable[value_str] assign[=] call[call[name[values]][constant[0]]][constant[0]]
return[name[value_str]] | keyword[def] identifier[get_zone_info] ( identifier[cls] , identifier[area_str] , identifier[match_type] = literal[string] , identifier[result_type] = literal[string] ):
literal[string]
identifier[values] =[]
keyword[if] identifier[match_type] == literal[string] :
identifier[values] = identifier[sqlite_query] ( literal[string] ,
literal[string] ,{ literal[string] : identifier[area_str] })
keyword[if] identifier[match_type] == literal[string] :
identifier[values] = identifier[sqlite_query] ( literal[string] ,
literal[string] ,
{ literal[string] : literal[string] + identifier[area_str] + literal[string] })
keyword[if] identifier[result_type] == literal[string] :
keyword[if] identifier[len] ( identifier[values] )> literal[int] :
identifier[values] = identifier[values] [ literal[int] : literal[int] ]
keyword[return] identifier[values]
keyword[if] identifier[result_type] == literal[string] :
keyword[if] identifier[len] ( identifier[values] )== literal[int] :
keyword[return] literal[string]
keyword[if] identifier[len] ( identifier[values] )> literal[int] :
identifier[value_str] = identifier[values] [ literal[int] ][ literal[int] ]
keyword[return] identifier[value_str] | def get_zone_info(cls, area_str, match_type='EXACT', result_type='LIST'):
"""
输入包含省份、城市、地区信息的内容,返回地区编号;
:param:
* area_str: (string) 要查询的区域,省份、城市、地区信息,比如 北京市
* match_type: (string) 查询匹配模式,默认值 'EXACT',表示精确匹配,可选 'FUZZY',表示模糊查询
* result_type: (string) 返回结果数量类型,默认值 'LIST',表示返回列表,可选 'SINGLE_STR',返回结果的第一个地区编号字符串
:returns:
* 返回类型 根据 resule_type 决定返回类型是列表或者单一字符串,列表中包含元组 比如:[('110000', '北京市')],元组中的第一个元素是地区码,
第二个元素是对应的区域内容 结果最多返回 20 个。
举例如下::
from fishbase.fish_data import *
print('--- fish_data get_zone_info demo ---')
result = IdCard.get_zone_info(area_str='北京市')
print(result)
# 模糊查询
result = IdCard.get_zone_info(area_str='西安市', match_type='FUZZY')
print(result)
result0 = []
for i in result:
result0.append(i[0])
print('---西安市---')
print(len(result0))
print(result0)
# 模糊查询, 结果返回设定 single_str
result = IdCard.get_zone_info(area_str='西安市', match_type='FUZZY', result_type='SINGLE_STR')
print(result)
# 模糊查询, 结果返回设定 single_str,西安市 和 西安 的差别
result = IdCard.get_zone_info(area_str='西安', match_type='FUZZY', result_type='SINGLE_STR')
print(result)
print('---')
输出结果::
--- fish_data get_zone_info demo ---
[('110000', '北京市')]
130522198407316471 True
---西安市---
11
['610100', '610101', '610102', '610103', '610104', '610111', '610112', '610113', '610114', '610115',
'610116']
610100
220403
---
"""
values = []
if match_type == 'EXACT':
values = sqlite_query('fish_data.sqlite', 'select zone, areanote from cn_idcard where areanote = :area', {'area': area_str}) # depends on [control=['if'], data=[]]
if match_type == 'FUZZY':
values = sqlite_query('fish_data.sqlite', 'select zone, areanote from cn_idcard where areanote like :area', {'area': '%' + area_str + '%'}) # depends on [control=['if'], data=[]]
# result_type 结果数量判断处理
if result_type == 'LIST':
# 如果返回记录多,大于 20 项,只返回前面 20 个结果
if len(values) > 20:
values = values[0:20] # depends on [control=['if'], data=[]]
return values # depends on [control=['if'], data=[]]
if result_type == 'SINGLE_STR':
if len(values) == 0:
return '' # depends on [control=['if'], data=[]]
if len(values) > 0:
value_str = values[0][0]
return value_str # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def docker_inspect(image):
    '''Inspects a docker image
    Returns: Parsed JSON data
    '''
    cmd = ['docker', 'inspect', '--type', 'image', image]
    proc = Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out_bytes, err_bytes = proc.communicate()
    out = out_bytes.decode('utf-8')
    err = err_bytes.decode('utf-8')
    if proc.returncode != 0:
        # Distinguish a missing image from any other docker failure.
        if 'no such image' in err.lower():
            raise NoSuchImageError(image)
        raise DockerError('Failed to inspect image: {}'.format(err.strip()))
    # `docker inspect` emits a JSON array; the single image is element 0.
    return json.loads(out)[0]
constant[Inspects a docker image
Returns: Parsed JSON data
]
variable[args] assign[=] list[[<ast.Constant object at 0x7da1b0d8b250>, <ast.Constant object at 0x7da1b0d8b910>, <ast.Constant object at 0x7da1b0d88ac0>, <ast.Constant object at 0x7da1b0d894e0>, <ast.Name object at 0x7da1b0d88460>]]
variable[p] assign[=] call[name[Popen], parameter[name[args]]]
<ast.Tuple object at 0x7da1b0d89360> assign[=] call[name[p].communicate, parameter[]]
variable[stdout] assign[=] call[name[stdout].decode, parameter[constant[utf-8]]]
variable[stderr] assign[=] call[name[stderr].decode, parameter[constant[utf-8]]]
if <ast.UnaryOp object at 0x7da1b0d88370> begin[:]
if compare[constant[no such image] in call[name[stderr].lower, parameter[]]] begin[:]
<ast.Raise object at 0x7da1b0d8a050>
<ast.Raise object at 0x7da1b0d88e80>
return[call[call[name[json].loads, parameter[name[stdout]]]][constant[0]]] | keyword[def] identifier[docker_inspect] ( identifier[image] ):
literal[string]
identifier[args] =[ literal[string] , literal[string] , literal[string] , literal[string] , identifier[image] ]
identifier[p] = identifier[Popen] ( identifier[args] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[stderr] = identifier[subprocess] . identifier[PIPE] )
identifier[stdout] , identifier[stderr] = identifier[p] . identifier[communicate] ()
identifier[stdout] = identifier[stdout] . identifier[decode] ( literal[string] )
identifier[stderr] = identifier[stderr] . identifier[decode] ( literal[string] )
keyword[if] keyword[not] identifier[p] . identifier[returncode] == literal[int] :
keyword[if] literal[string] keyword[in] identifier[stderr] . identifier[lower] ():
keyword[raise] identifier[NoSuchImageError] ( identifier[image] )
keyword[raise] identifier[DockerError] ( literal[string] . identifier[format] ( identifier[stderr] . identifier[strip] ()))
keyword[return] identifier[json] . identifier[loads] ( identifier[stdout] )[ literal[int] ] | def docker_inspect(image):
"""Inspects a docker image
Returns: Parsed JSON data
"""
args = ['docker', 'inspect', '--type', 'image', image]
p = Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate()
stdout = stdout.decode('utf-8')
stderr = stderr.decode('utf-8')
if not p.returncode == 0:
if 'no such image' in stderr.lower():
raise NoSuchImageError(image) # depends on [control=['if'], data=[]]
raise DockerError('Failed to inspect image: {}'.format(stderr.strip())) # depends on [control=['if'], data=[]]
return json.loads(stdout)[0] |
def _apply_aeff_corrections(intensity_map, aeff_corrections):
    """ Multiply a map by the effective-area correction factors.
    """
    # Transpose so the correction broadcasts over the right axis, then
    # transpose back before rebuilding the map.
    corrected = (aeff_corrections * intensity_map.data.T).T
    return HpxMap(corrected, intensity_map.hpx)
return HpxMap(data.T, intensity_map.hpx) | def function[_apply_aeff_corrections, parameter[intensity_map, aeff_corrections]]:
constant[ Multipy a map by the effective area correction
]
variable[data] assign[=] binary_operation[name[aeff_corrections] * name[intensity_map].data.T]
return[call[name[HpxMap], parameter[name[data].T, name[intensity_map].hpx]]] | keyword[def] identifier[_apply_aeff_corrections] ( identifier[intensity_map] , identifier[aeff_corrections] ):
literal[string]
identifier[data] = identifier[aeff_corrections] * identifier[intensity_map] . identifier[data] . identifier[T]
keyword[return] identifier[HpxMap] ( identifier[data] . identifier[T] , identifier[intensity_map] . identifier[hpx] ) | def _apply_aeff_corrections(intensity_map, aeff_corrections):
""" Multipy a map by the effective area correction
"""
data = aeff_corrections * intensity_map.data.T
return HpxMap(data.T, intensity_map.hpx) |
def _patched_run_hook(hook_name, project_dir, context):
    """Used to patch cookiecutter's ``run_hook`` function.
    This patched version ensures that the temple.yaml file is created before
    any cookiecutter hooks are executed
    """
    if hook_name == 'post_gen_project':
        # Write temple.yaml inside the generated project before the
        # post-generation hook runs.
        with temple.utils.cd(project_dir):
            temple.utils.write_temple_config(
                context['cookiecutter'], context['template'], context['version'])
    return cc_hooks.run_hook(hook_name, project_dir, context)
constant[Used to patch cookiecutter's ``run_hook`` function.
This patched version ensures that the temple.yaml file is created before
any cookiecutter hooks are executed
]
if compare[name[hook_name] equal[==] constant[post_gen_project]] begin[:]
with call[name[temple].utils.cd, parameter[name[project_dir]]] begin[:]
call[name[temple].utils.write_temple_config, parameter[call[name[context]][constant[cookiecutter]], call[name[context]][constant[template]], call[name[context]][constant[version]]]]
return[call[name[cc_hooks].run_hook, parameter[name[hook_name], name[project_dir], name[context]]]] | keyword[def] identifier[_patched_run_hook] ( identifier[hook_name] , identifier[project_dir] , identifier[context] ):
literal[string]
keyword[if] identifier[hook_name] == literal[string] :
keyword[with] identifier[temple] . identifier[utils] . identifier[cd] ( identifier[project_dir] ):
identifier[temple] . identifier[utils] . identifier[write_temple_config] ( identifier[context] [ literal[string] ],
identifier[context] [ literal[string] ],
identifier[context] [ literal[string] ])
keyword[return] identifier[cc_hooks] . identifier[run_hook] ( identifier[hook_name] , identifier[project_dir] , identifier[context] ) | def _patched_run_hook(hook_name, project_dir, context):
"""Used to patch cookiecutter's ``run_hook`` function.
This patched version ensures that the temple.yaml file is created before
any cookiecutter hooks are executed
"""
if hook_name == 'post_gen_project':
with temple.utils.cd(project_dir):
temple.utils.write_temple_config(context['cookiecutter'], context['template'], context['version']) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
return cc_hooks.run_hook(hook_name, project_dir, context) |
def connect(self):
    """
    Make the AMQP connection, create the input channel, declare the durable
    topic exchange and the input queue, and bind them together.
    """
    # Blocking (synchronous) connection; broker parameters come from the
    # module-level BLOCKING_MQ_PARAMS constant.
    self.connection = pika.BlockingConnection(BLOCKING_MQ_PARAMS)
    self.client_queue = ClientQueue()
    self.input_channel = self.connection.channel()
    # Durable topic exchange so routed messages survive a broker restart.
    # NOTE(review): 'type=' is the legacy pika kwarg (newer pika releases
    # use 'exchange_type') — presumably pinned to an older pika; confirm.
    self.input_channel.exchange_declare(exchange=self.INPUT_EXCHANGE,
                                        type='topic',
                                        durable=True)
    self.input_channel.queue_declare(queue=self.INPUT_QUEUE_NAME)
    self.input_channel.queue_bind(exchange=self.INPUT_EXCHANGE, queue=self.INPUT_QUEUE_NAME)
    log.info("Bind to queue named '%s' queue with exchange '%s'" % (self.INPUT_QUEUE_NAME,
                                                                    self.INPUT_EXCHANGE))
constant[
make amqp connection and create channels and queue binding
]
name[self].connection assign[=] call[name[pika].BlockingConnection, parameter[name[BLOCKING_MQ_PARAMS]]]
name[self].client_queue assign[=] call[name[ClientQueue], parameter[]]
name[self].input_channel assign[=] call[name[self].connection.channel, parameter[]]
call[name[self].input_channel.exchange_declare, parameter[]]
call[name[self].input_channel.queue_declare, parameter[]]
call[name[self].input_channel.queue_bind, parameter[]]
call[name[log].info, parameter[binary_operation[constant[Bind to queue named '%s' queue with exchange '%s'] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da204344e80>, <ast.Attribute object at 0x7da204344a90>]]]]] | keyword[def] identifier[connect] ( identifier[self] ):
literal[string]
identifier[self] . identifier[connection] = identifier[pika] . identifier[BlockingConnection] ( identifier[BLOCKING_MQ_PARAMS] )
identifier[self] . identifier[client_queue] = identifier[ClientQueue] ()
identifier[self] . identifier[input_channel] = identifier[self] . identifier[connection] . identifier[channel] ()
identifier[self] . identifier[input_channel] . identifier[exchange_declare] ( identifier[exchange] = identifier[self] . identifier[INPUT_EXCHANGE] ,
identifier[type] = literal[string] ,
identifier[durable] = keyword[True] )
identifier[self] . identifier[input_channel] . identifier[queue_declare] ( identifier[queue] = identifier[self] . identifier[INPUT_QUEUE_NAME] )
identifier[self] . identifier[input_channel] . identifier[queue_bind] ( identifier[exchange] = identifier[self] . identifier[INPUT_EXCHANGE] , identifier[queue] = identifier[self] . identifier[INPUT_QUEUE_NAME] )
identifier[log] . identifier[info] ( literal[string] %( identifier[self] . identifier[INPUT_QUEUE_NAME] ,
identifier[self] . identifier[INPUT_EXCHANGE] )) | def connect(self):
"""
make amqp connection and create channels and queue binding
"""
self.connection = pika.BlockingConnection(BLOCKING_MQ_PARAMS)
self.client_queue = ClientQueue()
self.input_channel = self.connection.channel()
self.input_channel.exchange_declare(exchange=self.INPUT_EXCHANGE, type='topic', durable=True)
self.input_channel.queue_declare(queue=self.INPUT_QUEUE_NAME)
self.input_channel.queue_bind(exchange=self.INPUT_EXCHANGE, queue=self.INPUT_QUEUE_NAME)
log.info("Bind to queue named '%s' queue with exchange '%s'" % (self.INPUT_QUEUE_NAME, self.INPUT_EXCHANGE)) |
def check_url (aggregate):
    """Block until the aggregate's URL queue is drained.

    Polls ``aggregate.urlqueue.join`` in 30-second intervals. On each
    timeout, stopped checker threads are cleaned up; the wait is
    abandoned once no checker threads remain.
    """
    finished = False
    while not finished:
        try:
            aggregate.urlqueue.join(timeout=30)
            finished = True
        except urlqueue.Timeout:
            # Periodic housekeeping while waiting: drop finished threads.
            aggregate.remove_stopped_threads()
            # Nothing left running means the queue will never drain; stop.
            finished = not any(aggregate.get_check_threads())
constant[Helper function waiting for URL queue.]
while constant[True] begin[:]
<ast.Try object at 0x7da1b2345f00> | keyword[def] identifier[check_url] ( identifier[aggregate] ):
literal[string]
keyword[while] keyword[True] :
keyword[try] :
identifier[aggregate] . identifier[urlqueue] . identifier[join] ( identifier[timeout] = literal[int] )
keyword[break]
keyword[except] identifier[urlqueue] . identifier[Timeout] :
identifier[aggregate] . identifier[remove_stopped_threads] ()
keyword[if] keyword[not] identifier[any] ( identifier[aggregate] . identifier[get_check_threads] ()):
keyword[break] | def check_url(aggregate):
"""Helper function waiting for URL queue."""
while True:
try:
aggregate.urlqueue.join(timeout=30)
break # depends on [control=['try'], data=[]]
except urlqueue.Timeout:
# Cleanup threads every 30 seconds
aggregate.remove_stopped_threads()
if not any(aggregate.get_check_threads()):
break # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] |
def json_qry(dataset, qry_str, params={}):
    """ Takes a json query string and returns the results
    args:
        dataset: RdfDataset to query against
        qry_str: query string
        params: dictionary of params
    """
    # NOTE(review): mutable default `params={}` is shared across calls;
    # safe only because this function never mutates it — confirm.
    # if qry_str.startswith("$.bf_itemOf[rdf_type=bf_Print].='print',\n"):
    #     pdb.set_trace()
    # Normalize the query so it is always rooted at '$'.
    if not '$' in qry_str:
        qry_str = ".".join(['$', qry_str.strip()])
    # '$' resolves to the caller-supplied root value, or the dataset itself.
    dallor_val = params.get("$", dataset)
    if isinstance(dallor_val, rdflib.URIRef):
        dallor_val = Uri(dallor_val)
    # A bare '$' query returns the root value unchanged.
    if qry_str.strip() == '$':
        return [dallor_val]
    parsed_qry = parse_json_qry(qry_str)
    qry_parts = parsed_qry['qry_parts']
    post_actions = parsed_qry['params']
    # print(qry_parts)
    rtn_list = UniqueList()
    # An explicit 'dataset' param overrides the positional dataset.
    if params.get('dataset'):
        dataset = params['dataset']
    for or_part in qry_parts:
        # or_part is (parts, direction); direction 0 walks forward from
        # the root value, anything else walks the parts in reverse.
        if or_part[1] == 0:
            if isinstance(dallor_val, dict):
                result = dallor_val
            else:
                # Resolve the root key in the dataset, retrying with the
                # URI and blank-node coercions before giving up.
                try:
                    result = dataset[dallor_val]
                except KeyError:
                    try:
                        result = dataset[Uri(dallor_val)]
                    except KeyError:
                        try:
                            result = dataset[BlankNode(dallor_val)]
                        except KeyError:
                            continue
            forward = True
            for part in or_part[0][1:]:
                # '*' toggles the traversal direction for subsequent parts.
                if part == "*":
                    forward = not forward
                else:
                    if forward:
                        result = get_json_qry_item(result, part)
                    else:
                        result = get_reverse_json_qry_item(result,
                                                           part,
                                                           False)
        else:
            # Reverse query: start from the dataset and apply the parts
            # in reverse order, beginning in the reverse direction.
            result = dataset
            parts = or_part[0].copy()
            parts.reverse()
            forward = False
            for part in parts[1:]:
                if part == "*":
                    forward = not forward
                else:
                    if forward:
                        result = get_json_qry_item(result, part)
                    else:
                        result = get_reverse_json_qry_item(result,
                                                           part,
                                                           False,
                                                           dallor_val)
        rtn_list += result
    # Apply post-processing actions (e.g. filters) parsed from the query.
    for action in post_actions:
        rtn_list = action(rtn_list)
    return rtn_list
constant[ Takes a json query string and returns the results
args:
dataset: RdfDataset to query against
qry_str: query string
params: dictionary of params
]
if <ast.UnaryOp object at 0x7da20c6a86d0> begin[:]
variable[qry_str] assign[=] call[constant[.].join, parameter[list[[<ast.Constant object at 0x7da20c6aa650>, <ast.Call object at 0x7da20c6a8220>]]]]
variable[dallor_val] assign[=] call[name[params].get, parameter[constant[$], name[dataset]]]
if call[name[isinstance], parameter[name[dallor_val], name[rdflib].URIRef]] begin[:]
variable[dallor_val] assign[=] call[name[Uri], parameter[name[dallor_val]]]
if compare[call[name[qry_str].strip, parameter[]] equal[==] constant[$]] begin[:]
return[list[[<ast.Name object at 0x7da20c6ab580>]]]
variable[parsed_qry] assign[=] call[name[parse_json_qry], parameter[name[qry_str]]]
variable[qry_parts] assign[=] call[name[parsed_qry]][constant[qry_parts]]
variable[post_actions] assign[=] call[name[parsed_qry]][constant[params]]
variable[rtn_list] assign[=] call[name[UniqueList], parameter[]]
if call[name[params].get, parameter[constant[dataset]]] begin[:]
variable[dataset] assign[=] call[name[params]][constant[dataset]]
for taget[name[or_part]] in starred[name[qry_parts]] begin[:]
if compare[call[name[or_part]][constant[1]] equal[==] constant[0]] begin[:]
if call[name[isinstance], parameter[name[dallor_val], name[dict]]] begin[:]
variable[result] assign[=] name[dallor_val]
variable[forward] assign[=] constant[True]
for taget[name[part]] in starred[call[call[name[or_part]][constant[0]]][<ast.Slice object at 0x7da20c6a8160>]] begin[:]
if compare[name[part] equal[==] constant[*]] begin[:]
variable[forward] assign[=] <ast.UnaryOp object at 0x7da20c6aba60>
<ast.AugAssign object at 0x7da204565c90>
for taget[name[action]] in starred[name[post_actions]] begin[:]
variable[rtn_list] assign[=] call[name[action], parameter[name[rtn_list]]]
return[name[rtn_list]] | keyword[def] identifier[json_qry] ( identifier[dataset] , identifier[qry_str] , identifier[params] ={}):
literal[string]
keyword[if] keyword[not] literal[string] keyword[in] identifier[qry_str] :
identifier[qry_str] = literal[string] . identifier[join] ([ literal[string] , identifier[qry_str] . identifier[strip] ()])
identifier[dallor_val] = identifier[params] . identifier[get] ( literal[string] , identifier[dataset] )
keyword[if] identifier[isinstance] ( identifier[dallor_val] , identifier[rdflib] . identifier[URIRef] ):
identifier[dallor_val] = identifier[Uri] ( identifier[dallor_val] )
keyword[if] identifier[qry_str] . identifier[strip] ()== literal[string] :
keyword[return] [ identifier[dallor_val] ]
identifier[parsed_qry] = identifier[parse_json_qry] ( identifier[qry_str] )
identifier[qry_parts] = identifier[parsed_qry] [ literal[string] ]
identifier[post_actions] = identifier[parsed_qry] [ literal[string] ]
identifier[rtn_list] = identifier[UniqueList] ()
keyword[if] identifier[params] . identifier[get] ( literal[string] ):
identifier[dataset] = identifier[params] [ literal[string] ]
keyword[for] identifier[or_part] keyword[in] identifier[qry_parts] :
keyword[if] identifier[or_part] [ literal[int] ]== literal[int] :
keyword[if] identifier[isinstance] ( identifier[dallor_val] , identifier[dict] ):
identifier[result] = identifier[dallor_val]
keyword[else] :
keyword[try] :
identifier[result] = identifier[dataset] [ identifier[dallor_val] ]
keyword[except] identifier[KeyError] :
keyword[try] :
identifier[result] = identifier[dataset] [ identifier[Uri] ( identifier[dallor_val] )]
keyword[except] identifier[KeyError] :
keyword[try] :
identifier[result] = identifier[dataset] [ identifier[BlankNode] ( identifier[dallor_val] )]
keyword[except] identifier[KeyError] :
keyword[continue]
identifier[forward] = keyword[True]
keyword[for] identifier[part] keyword[in] identifier[or_part] [ literal[int] ][ literal[int] :]:
keyword[if] identifier[part] == literal[string] :
identifier[forward] = keyword[not] identifier[forward]
keyword[else] :
keyword[if] identifier[forward] :
identifier[result] = identifier[get_json_qry_item] ( identifier[result] , identifier[part] )
keyword[else] :
identifier[result] = identifier[get_reverse_json_qry_item] ( identifier[result] ,
identifier[part] ,
keyword[False] )
keyword[else] :
identifier[result] = identifier[dataset]
identifier[parts] = identifier[or_part] [ literal[int] ]. identifier[copy] ()
identifier[parts] . identifier[reverse] ()
identifier[forward] = keyword[False]
keyword[for] identifier[part] keyword[in] identifier[parts] [ literal[int] :]:
keyword[if] identifier[part] == literal[string] :
identifier[forward] = keyword[not] identifier[forward]
keyword[else] :
keyword[if] identifier[forward] :
identifier[result] = identifier[get_json_qry_item] ( identifier[result] , identifier[part] )
keyword[else] :
identifier[result] = identifier[get_reverse_json_qry_item] ( identifier[result] ,
identifier[part] ,
keyword[False] ,
identifier[dallor_val] )
identifier[rtn_list] += identifier[result]
keyword[for] identifier[action] keyword[in] identifier[post_actions] :
identifier[rtn_list] = identifier[action] ( identifier[rtn_list] )
keyword[return] identifier[rtn_list] | def json_qry(dataset, qry_str, params={}):
""" Takes a json query string and returns the results
args:
dataset: RdfDataset to query against
qry_str: query string
params: dictionary of params
"""
# if qry_str.startswith("$.bf_itemOf[rdf_type=bf_Print].='print',\n"):
# pdb.set_trace()
if not '$' in qry_str:
qry_str = '.'.join(['$', qry_str.strip()]) # depends on [control=['if'], data=[]]
dallor_val = params.get('$', dataset)
if isinstance(dallor_val, rdflib.URIRef):
dallor_val = Uri(dallor_val) # depends on [control=['if'], data=[]]
if qry_str.strip() == '$':
return [dallor_val] # depends on [control=['if'], data=[]]
parsed_qry = parse_json_qry(qry_str)
qry_parts = parsed_qry['qry_parts']
post_actions = parsed_qry['params']
# print(qry_parts)
rtn_list = UniqueList()
if params.get('dataset'):
dataset = params['dataset'] # depends on [control=['if'], data=[]]
for or_part in qry_parts:
if or_part[1] == 0:
if isinstance(dallor_val, dict):
result = dallor_val # depends on [control=['if'], data=[]]
else:
try:
result = dataset[dallor_val] # depends on [control=['try'], data=[]]
except KeyError:
try:
result = dataset[Uri(dallor_val)] # depends on [control=['try'], data=[]]
except KeyError:
try:
result = dataset[BlankNode(dallor_val)] # depends on [control=['try'], data=[]]
except KeyError:
continue # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
forward = True
for part in or_part[0][1:]:
if part == '*':
forward = not forward # depends on [control=['if'], data=[]]
elif forward:
result = get_json_qry_item(result, part) # depends on [control=['if'], data=[]]
else:
result = get_reverse_json_qry_item(result, part, False) # depends on [control=['for'], data=['part']] # depends on [control=['if'], data=[]]
else:
result = dataset
parts = or_part[0].copy()
parts.reverse()
forward = False
for part in parts[1:]:
if part == '*':
forward = not forward # depends on [control=['if'], data=[]]
elif forward:
result = get_json_qry_item(result, part) # depends on [control=['if'], data=[]]
else:
result = get_reverse_json_qry_item(result, part, False, dallor_val) # depends on [control=['for'], data=['part']]
rtn_list += result # depends on [control=['for'], data=['or_part']]
for action in post_actions:
rtn_list = action(rtn_list) # depends on [control=['for'], data=['action']]
return rtn_list |
def centerOnDateTime(self, dtime):
    """
    Centers the view on a given datetime for the gantt widget.
    :param dtime | <QDateTime>
    """
    gantt_view = self.uiGanttVIEW
    chart_scene = gantt_view.scene()
    # Keep the current vertical position; only the horizontal axis
    # follows the requested datetime.
    top_left = gantt_view.mapToScene(0, 0)
    gantt_view.centerOn(chart_scene.datetimeXPos(dtime), top_left.y())
constant[
Centers the view on a given datetime for the gantt widget.
:param dtime | <QDateTime>
]
variable[view] assign[=] name[self].uiGanttVIEW
variable[scene] assign[=] call[name[view].scene, parameter[]]
variable[point] assign[=] call[name[view].mapToScene, parameter[constant[0], constant[0]]]
variable[x] assign[=] call[name[scene].datetimeXPos, parameter[name[dtime]]]
variable[y] assign[=] call[name[point].y, parameter[]]
call[name[view].centerOn, parameter[name[x], name[y]]] | keyword[def] identifier[centerOnDateTime] ( identifier[self] , identifier[dtime] ):
literal[string]
identifier[view] = identifier[self] . identifier[uiGanttVIEW]
identifier[scene] = identifier[view] . identifier[scene] ()
identifier[point] = identifier[view] . identifier[mapToScene] ( literal[int] , literal[int] )
identifier[x] = identifier[scene] . identifier[datetimeXPos] ( identifier[dtime] )
identifier[y] = identifier[point] . identifier[y] ()
identifier[view] . identifier[centerOn] ( identifier[x] , identifier[y] ) | def centerOnDateTime(self, dtime):
"""
Centers the view on a given datetime for the gantt widget.
:param dtime | <QDateTime>
"""
view = self.uiGanttVIEW
scene = view.scene()
point = view.mapToScene(0, 0)
x = scene.datetimeXPos(dtime)
y = point.y()
view.centerOn(x, y) |
def get_messages(self,
                 statuses=DEFAULT_MESSAGE_STATUSES,
                 order="sent_at desc",
                 offset=None,
                 count=None,
                 content=False):
    """Returns a list of messages your account sent.
    Messages are sorted by ``order``, starting at an optional integer ``offset``, and optionally limited to the first ``count`` items (in sorted order).
    Returned data includes various statistics about each message, e.g., ``total_opens``, ``open_rate``, ``total_clicks``, ``unsubs``, ``soft_bounces``. If ``content=True``, the returned data will also include HTML content of each message.
    """
    # Request payload: filter criteria, sort order, then paging window.
    payload = [{"status": statuses}, order, fmt_paging(offset, count)]
    # Ask for HTML content only when explicitly requested.
    service = "query:Message.stats" + (", Message.content" if content else "")
    return self.request(service, payload)
constant[Returns a list of messages your account sent.
Messages are sorted by ``order``, starting at an optional integer ``offset``, and optionally limited to the first ``count`` items (in sorted order).
Returned data includes various statistics about each message, e.g., ``total_opens``, ``open_rate``, ``total_clicks``, ``unsubs``, ``soft_bounces``. If ``content=True``, the returned data will also include HTML content of each message.
]
variable[req_data] assign[=] list[[<ast.Dict object at 0x7da1b0a05a80>, <ast.Name object at 0x7da1b0a068c0>, <ast.Call object at 0x7da1b0a05930>]]
variable[service] assign[=] constant[query:Message.stats]
if name[content] begin[:]
<ast.AugAssign object at 0x7da1b0a06920>
return[call[name[self].request, parameter[name[service], name[req_data]]]] | keyword[def] identifier[get_messages] ( identifier[self] ,
identifier[statuses] = identifier[DEFAULT_MESSAGE_STATUSES] ,
identifier[order] = literal[string] ,
identifier[offset] = keyword[None] ,
identifier[count] = keyword[None] ,
identifier[content] = keyword[False] ):
literal[string]
identifier[req_data] =[{ literal[string] : identifier[statuses] }, identifier[order] , identifier[fmt_paging] ( identifier[offset] , identifier[count] )]
identifier[service] = literal[string]
keyword[if] identifier[content] : identifier[service] += literal[string]
keyword[return] identifier[self] . identifier[request] ( identifier[service] , identifier[req_data] ) | def get_messages(self, statuses=DEFAULT_MESSAGE_STATUSES, order='sent_at desc', offset=None, count=None, content=False):
"""Returns a list of messages your account sent.
Messages are sorted by ``order``, starting at an optional integer ``offset``, and optionally limited to the first ``count`` items (in sorted order).
Returned data includes various statistics about each message, e.g., ``total_opens``, ``open_rate``, ``total_clicks``, ``unsubs``, ``soft_bounces``. If ``content=True``, the returned data will also include HTML content of each message.
"""
req_data = [{'status': statuses}, order, fmt_paging(offset, count)]
service = 'query:Message.stats'
if content:
service += ', Message.content' # depends on [control=['if'], data=[]]
return self.request(service, req_data) |
def image_information_response(self):
    """Parse image information request and create response.

    Builds an IIIF info.json response for the current identifier:
    configuration supplies tiling parameters, the manipulator supplies
    the image dimensions, and auth services are attached when enabled.
    """
    # Degraded access: swap in the degraded identifier when the request
    # maps to one (e.g. lower-quality public variant).
    dr = degraded_request(self.identifier)
    if (dr):
        self.logger.info("image_information: degraded %s -> %s" %
                         (self.identifier, dr))
        self.degraded = self.identifier
        self.identifier = dr
    else:
        self.logger.info("image_information: %s" % (self.identifier))
    # get size
    self.manipulator.srcfile = self.file
    self.manipulator.do_first()
    # most of info.json comes from config, a few things specific to image
    info = {'tile_height': self.config.tile_height,
            'tile_width': self.config.tile_width,
            'scale_factors': self.config.scale_factors
            }
    # calculate scale factors if not hard-coded
    if ('auto' in self.config.scale_factors):
        info['scale_factors'] = self.manipulator.scale_factors(
            self.config.tile_width, self.config.tile_height)
    i = IIIFInfo(conf=info, api_version=self.api_version)
    i.server_and_prefix = self.server_and_prefix
    i.identifier = self.iiif.identifier
    i.width = self.manipulator.width
    i.height = self.manipulator.height
    # Quality names changed between IIIF Image API 1.x and 2.0+.
    if (self.api_version >= '2.0'):
        # FIXME - should come from manipulator
        i.qualities = ["default", "color", "gray"]
    else:
        # FIXME - should come from manipulator
        i.qualities = ["native", "color", "gray"]
    i.formats = ["jpg", "png"]  # FIXME - should come from manipulator
    # Attach authentication service descriptions when auth is configured.
    if (self.auth):
        self.auth.add_services(i)
    return self.make_response(i.as_json(),
                              headers={"Content-Type": self.json_mime_type})
constant[Parse image information request and create response.]
variable[dr] assign[=] call[name[degraded_request], parameter[name[self].identifier]]
if name[dr] begin[:]
call[name[self].logger.info, parameter[binary_operation[constant[image_information: degraded %s -> %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b031cd00>, <ast.Name object at 0x7da1b031d120>]]]]]
name[self].degraded assign[=] name[self].identifier
name[self].identifier assign[=] name[dr]
name[self].manipulator.srcfile assign[=] name[self].file
call[name[self].manipulator.do_first, parameter[]]
variable[info] assign[=] dictionary[[<ast.Constant object at 0x7da1b04d9330>, <ast.Constant object at 0x7da1b04da3e0>, <ast.Constant object at 0x7da1b04d9810>], [<ast.Attribute object at 0x7da1b04dbfa0>, <ast.Attribute object at 0x7da1b04daa40>, <ast.Attribute object at 0x7da1b04dbe20>]]
if compare[constant[auto] in name[self].config.scale_factors] begin[:]
call[name[info]][constant[scale_factors]] assign[=] call[name[self].manipulator.scale_factors, parameter[name[self].config.tile_width, name[self].config.tile_height]]
variable[i] assign[=] call[name[IIIFInfo], parameter[]]
name[i].server_and_prefix assign[=] name[self].server_and_prefix
name[i].identifier assign[=] name[self].iiif.identifier
name[i].width assign[=] name[self].manipulator.width
name[i].height assign[=] name[self].manipulator.height
if compare[name[self].api_version greater_or_equal[>=] constant[2.0]] begin[:]
name[i].qualities assign[=] list[[<ast.Constant object at 0x7da20c6c64a0>, <ast.Constant object at 0x7da20c6c6050>, <ast.Constant object at 0x7da20c6c4880>]]
name[i].formats assign[=] list[[<ast.Constant object at 0x7da20c6c7d30>, <ast.Constant object at 0x7da20c6c5c00>]]
if name[self].auth begin[:]
call[name[self].auth.add_services, parameter[name[i]]]
return[call[name[self].make_response, parameter[call[name[i].as_json, parameter[]]]]] | keyword[def] identifier[image_information_response] ( identifier[self] ):
literal[string]
identifier[dr] = identifier[degraded_request] ( identifier[self] . identifier[identifier] )
keyword[if] ( identifier[dr] ):
identifier[self] . identifier[logger] . identifier[info] ( literal[string] %
( identifier[self] . identifier[identifier] , identifier[dr] ))
identifier[self] . identifier[degraded] = identifier[self] . identifier[identifier]
identifier[self] . identifier[identifier] = identifier[dr]
keyword[else] :
identifier[self] . identifier[logger] . identifier[info] ( literal[string] %( identifier[self] . identifier[identifier] ))
identifier[self] . identifier[manipulator] . identifier[srcfile] = identifier[self] . identifier[file]
identifier[self] . identifier[manipulator] . identifier[do_first] ()
identifier[info] ={ literal[string] : identifier[self] . identifier[config] . identifier[tile_height] ,
literal[string] : identifier[self] . identifier[config] . identifier[tile_width] ,
literal[string] : identifier[self] . identifier[config] . identifier[scale_factors]
}
keyword[if] ( literal[string] keyword[in] identifier[self] . identifier[config] . identifier[scale_factors] ):
identifier[info] [ literal[string] ]= identifier[self] . identifier[manipulator] . identifier[scale_factors] (
identifier[self] . identifier[config] . identifier[tile_width] , identifier[self] . identifier[config] . identifier[tile_height] )
identifier[i] = identifier[IIIFInfo] ( identifier[conf] = identifier[info] , identifier[api_version] = identifier[self] . identifier[api_version] )
identifier[i] . identifier[server_and_prefix] = identifier[self] . identifier[server_and_prefix]
identifier[i] . identifier[identifier] = identifier[self] . identifier[iiif] . identifier[identifier]
identifier[i] . identifier[width] = identifier[self] . identifier[manipulator] . identifier[width]
identifier[i] . identifier[height] = identifier[self] . identifier[manipulator] . identifier[height]
keyword[if] ( identifier[self] . identifier[api_version] >= literal[string] ):
identifier[i] . identifier[qualities] =[ literal[string] , literal[string] , literal[string] ]
keyword[else] :
identifier[i] . identifier[qualities] =[ literal[string] , literal[string] , literal[string] ]
identifier[i] . identifier[formats] =[ literal[string] , literal[string] ]
keyword[if] ( identifier[self] . identifier[auth] ):
identifier[self] . identifier[auth] . identifier[add_services] ( identifier[i] )
keyword[return] identifier[self] . identifier[make_response] ( identifier[i] . identifier[as_json] (),
identifier[headers] ={ literal[string] : identifier[self] . identifier[json_mime_type] }) | def image_information_response(self):
"""Parse image information request and create response."""
dr = degraded_request(self.identifier)
if dr:
self.logger.info('image_information: degraded %s -> %s' % (self.identifier, dr))
self.degraded = self.identifier
self.identifier = dr # depends on [control=['if'], data=[]]
else:
self.logger.info('image_information: %s' % self.identifier)
# get size
self.manipulator.srcfile = self.file
self.manipulator.do_first()
# most of info.json comes from config, a few things specific to image
info = {'tile_height': self.config.tile_height, 'tile_width': self.config.tile_width, 'scale_factors': self.config.scale_factors}
# calculate scale factors if not hard-coded
if 'auto' in self.config.scale_factors:
info['scale_factors'] = self.manipulator.scale_factors(self.config.tile_width, self.config.tile_height) # depends on [control=['if'], data=[]]
i = IIIFInfo(conf=info, api_version=self.api_version)
i.server_and_prefix = self.server_and_prefix
i.identifier = self.iiif.identifier
i.width = self.manipulator.width
i.height = self.manipulator.height
if self.api_version >= '2.0':
# FIXME - should come from manipulator
i.qualities = ['default', 'color', 'gray'] # depends on [control=['if'], data=[]]
else:
# FIXME - should come from manipulator
i.qualities = ['native', 'color', 'gray']
i.formats = ['jpg', 'png'] # FIXME - should come from manipulator
if self.auth:
self.auth.add_services(i) # depends on [control=['if'], data=[]]
return self.make_response(i.as_json(), headers={'Content-Type': self.json_mime_type}) |
def gcd_float(numbers, tol=1e-8):
    """
    Returns the greatest common divisor for a sequence of numbers.
    Uses a numerical tolerance, so can be used on floats.

    Args:
        numbers: Non-empty sequence of non-negative numbers.
        tol: Numerical tolerance below which a Euclidean remainder is
            treated as zero.

    Returns:
        Greatest common divisor of numbers. Note this is a float when
        any input is a float, and an int only when all inputs are ints
        (the previous docstring's "(int)" claim was wrong for floats).

    Raises:
        IndexError: If ``numbers`` is empty.
    """
    def pair_gcd_tol(a, b):
        """Calculate the Greatest Common Divisor of a and b.
        Unless b==0, the result will have the same sign as b (so that when
        b is divided by it, the result comes out positive).
        """
        while b > tol:
            a, b = b, a % b
        return a

    n = numbers[0]
    # Skip the first element: gcd(n, n) == n, so folding it in again
    # (as the original loop did) is redundant work.
    for x in numbers[1:]:
        n = pair_gcd_tol(n, x)
    return n
constant[
Returns the greatest common divisor for a sequence of numbers.
Uses a numerical tolerance, so can be used on floats
Args:
numbers: Sequence of numbers.
tol: Numerical tolerance
Returns:
(int) Greatest common divisor of numbers.
]
def function[pair_gcd_tol, parameter[a, b]]:
constant[Calculate the Greatest Common Divisor of a and b.
Unless b==0, the result will have the same sign as b (so that when
b is divided by it, the result comes out positive).
]
while compare[name[b] greater[>] name[tol]] begin[:]
<ast.Tuple object at 0x7da1b13798a0> assign[=] tuple[[<ast.Name object at 0x7da1b137b4c0>, <ast.BinOp object at 0x7da1b1379ff0>]]
return[name[a]]
variable[n] assign[=] call[name[numbers]][constant[0]]
for taget[name[i]] in starred[name[numbers]] begin[:]
variable[n] assign[=] call[name[pair_gcd_tol], parameter[name[n], name[i]]]
return[name[n]] | keyword[def] identifier[gcd_float] ( identifier[numbers] , identifier[tol] = literal[int] ):
literal[string]
keyword[def] identifier[pair_gcd_tol] ( identifier[a] , identifier[b] ):
literal[string]
keyword[while] identifier[b] > identifier[tol] :
identifier[a] , identifier[b] = identifier[b] , identifier[a] % identifier[b]
keyword[return] identifier[a]
identifier[n] = identifier[numbers] [ literal[int] ]
keyword[for] identifier[i] keyword[in] identifier[numbers] :
identifier[n] = identifier[pair_gcd_tol] ( identifier[n] , identifier[i] )
keyword[return] identifier[n] | def gcd_float(numbers, tol=1e-08):
"""
Returns the greatest common divisor for a sequence of numbers.
Uses a numerical tolerance, so can be used on floats
Args:
numbers: Sequence of numbers.
tol: Numerical tolerance
Returns:
(int) Greatest common divisor of numbers.
"""
def pair_gcd_tol(a, b):
"""Calculate the Greatest Common Divisor of a and b.
Unless b==0, the result will have the same sign as b (so that when
b is divided by it, the result comes out positive).
"""
while b > tol:
(a, b) = (b, a % b) # depends on [control=['while'], data=['b']]
return a
n = numbers[0]
for i in numbers:
n = pair_gcd_tol(n, i) # depends on [control=['for'], data=['i']]
return n |
def map_run(self, path=None, **kwargs):
    '''
    To execute a map
    '''
    # Seed options with the map path, then let explicit kwargs override.
    opts = {'map': path} if path else {}
    opts.update(kwargs)
    mapper = salt.cloud.Map(self._opts_defaults(**opts))
    return salt.utils.data.simple_types_filter(
        mapper.run_map(mapper.map_data())
    )
constant[
To execute a map
]
variable[kwarg] assign[=] dictionary[[], []]
if name[path] begin[:]
call[name[kwarg]][constant[map]] assign[=] name[path]
call[name[kwarg].update, parameter[name[kwargs]]]
variable[mapper] assign[=] call[name[salt].cloud.Map, parameter[call[name[self]._opts_defaults, parameter[]]]]
variable[dmap] assign[=] call[name[mapper].map_data, parameter[]]
return[call[name[salt].utils.data.simple_types_filter, parameter[call[name[mapper].run_map, parameter[name[dmap]]]]]] | keyword[def] identifier[map_run] ( identifier[self] , identifier[path] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[kwarg] ={}
keyword[if] identifier[path] :
identifier[kwarg] [ literal[string] ]= identifier[path]
identifier[kwarg] . identifier[update] ( identifier[kwargs] )
identifier[mapper] = identifier[salt] . identifier[cloud] . identifier[Map] ( identifier[self] . identifier[_opts_defaults] (** identifier[kwarg] ))
identifier[dmap] = identifier[mapper] . identifier[map_data] ()
keyword[return] identifier[salt] . identifier[utils] . identifier[data] . identifier[simple_types_filter] (
identifier[mapper] . identifier[run_map] ( identifier[dmap] )
) | def map_run(self, path=None, **kwargs):
"""
To execute a map
"""
kwarg = {}
if path:
kwarg['map'] = path # depends on [control=['if'], data=[]]
kwarg.update(kwargs)
mapper = salt.cloud.Map(self._opts_defaults(**kwarg))
dmap = mapper.map_data()
return salt.utils.data.simple_types_filter(mapper.run_map(dmap)) |
def create_new_sticker_set(self, user_id, name, title, png_sticker, emojis, contains_masks=None, mask_position=None):
    """
    Use this method to create new sticker set owned by a user. The bot will be able to edit the created sticker set. Returns True on success.
    https://core.telegram.org/bots/api#createnewstickerset
    Parameters:
    :param user_id: User identifier of created sticker set owner
    :type user_id: int
    :param name: Short name of sticker set, to be used in t.me/addstickers/ URLs (e.g., animals). Can contain only english letters, digits and underscores. Must begin with a letter, can't contain consecutive underscores and must end in “_by_<bot username>”. <bot_username> is case insensitive. 1-64 characters.
    :type name: str|unicode
    :param title: Sticker set title, 1-64 characters
    :type title: str|unicode
    :param png_sticker: Png image with the sticker, must be up to 512 kilobytes in size, dimensions must not exceed 512px, and either width or height must be exactly 512px. Pass a file_id as a String to send a file that already exists on the Telegram servers, pass an HTTP URL as a String for Telegram to get a file from the Internet, or upload a new one using multipart/form-data. More info on Sending Files »
    :type png_sticker: pytgbot.api_types.sendable.files.InputFile | str|unicode
    :param emojis: One or more emoji corresponding to the sticker
    :type emojis: str|unicode
    Optional keyword parameters:
    :param contains_masks: Pass True, if a set of mask stickers should be created
    :type contains_masks: bool
    :param mask_position: A JSON-serialized object for position where the mask should be placed on faces
    :type mask_position: pytgbot.api_types.receivable.stickers.MaskPosition
    Returns:
    :return: Returns True on success
    :rtype: bool
    """
    # Local imports keep these optional types off the module import path.
    from pytgbot.api_types.receivable.stickers import MaskPosition
    from pytgbot.api_types.sendable.files import InputFile
    # Validate every argument's type before hitting the network.
    assert_type_or_raise(user_id, int, parameter_name="user_id")
    assert_type_or_raise(name, unicode_type, parameter_name="name")
    assert_type_or_raise(title, unicode_type, parameter_name="title")
    assert_type_or_raise(png_sticker, (InputFile, unicode_type), parameter_name="png_sticker")
    assert_type_or_raise(emojis, unicode_type, parameter_name="emojis")
    assert_type_or_raise(contains_masks, None, bool, parameter_name="contains_masks")
    assert_type_or_raise(mask_position, None, MaskPosition, parameter_name="mask_position")
    result = self.do("createNewStickerSet", user_id=user_id, name=name, title=title, png_sticker=png_sticker, emojis=emojis, contains_masks=contains_masks, mask_position=mask_position)
    if self.return_python_objects:
        logger.debug("Trying to parse {data}".format(data=repr(result)))
        try:
            # The API returns a bare boolean on success.
            return from_array_list(bool, result, list_level=0, is_builtin=True)
        except TgApiParseException:
            logger.debug("Failed parsing as primitive bool", exc_info=True)
        # end try
        # no valid parsing so far
        raise TgApiParseException("Could not parse result.")  # See debug log for details!
    # end if return_python_objects
    return result
constant[
Use this method to create new sticker set owned by a user. The bot will be able to edit the created sticker set. Returns True on success.
https://core.telegram.org/bots/api#createnewstickerset
Parameters:
:param user_id: User identifier of created sticker set owner
:type user_id: int
:param name: Short name of sticker set, to be used in t.me/addstickers/ URLs (e.g., animals). Can contain only english letters, digits and underscores. Must begin with a letter, can't contain consecutive underscores and must end in “_by_<bot username>”. <bot_username> is case insensitive. 1-64 characters.
:type name: str|unicode
:param title: Sticker set title, 1-64 characters
:type title: str|unicode
:param png_sticker: Png image with the sticker, must be up to 512 kilobytes in size, dimensions must not exceed 512px, and either width or height must be exactly 512px. Pass a file_id as a String to send a file that already exists on the Telegram servers, pass an HTTP URL as a String for Telegram to get a file from the Internet, or upload a new one using multipart/form-data. More info on Sending Files »
:type png_sticker: pytgbot.api_types.sendable.files.InputFile | str|unicode
:param emojis: One or more emoji corresponding to the sticker
:type emojis: str|unicode
Optional keyword parameters:
:param contains_masks: Pass True, if a set of mask stickers should be created
:type contains_masks: bool
:param mask_position: A JSON-serialized object for position where the mask should be placed on faces
:type mask_position: pytgbot.api_types.receivable.stickers.MaskPosition
Returns:
:return: Returns True on success
:rtype: bool
]
from relative_module[pytgbot.api_types.receivable.stickers] import module[MaskPosition]
from relative_module[pytgbot.api_types.sendable.files] import module[InputFile]
call[name[assert_type_or_raise], parameter[name[user_id], name[int]]]
call[name[assert_type_or_raise], parameter[name[name], name[unicode_type]]]
call[name[assert_type_or_raise], parameter[name[title], name[unicode_type]]]
call[name[assert_type_or_raise], parameter[name[png_sticker], tuple[[<ast.Name object at 0x7da1b0430d30>, <ast.Name object at 0x7da1b0431390>]]]]
call[name[assert_type_or_raise], parameter[name[emojis], name[unicode_type]]]
call[name[assert_type_or_raise], parameter[name[contains_masks], constant[None], name[bool]]]
call[name[assert_type_or_raise], parameter[name[mask_position], constant[None], name[MaskPosition]]]
variable[result] assign[=] call[name[self].do, parameter[constant[createNewStickerSet]]]
if name[self].return_python_objects begin[:]
call[name[logger].debug, parameter[call[constant[Trying to parse {data}].format, parameter[]]]]
<ast.Try object at 0x7da1b04d4d00>
<ast.Raise object at 0x7da1b04d56f0>
return[name[result]] | keyword[def] identifier[create_new_sticker_set] ( identifier[self] , identifier[user_id] , identifier[name] , identifier[title] , identifier[png_sticker] , identifier[emojis] , identifier[contains_masks] = keyword[None] , identifier[mask_position] = keyword[None] ):
literal[string]
keyword[from] identifier[pytgbot] . identifier[api_types] . identifier[receivable] . identifier[stickers] keyword[import] identifier[MaskPosition]
keyword[from] identifier[pytgbot] . identifier[api_types] . identifier[sendable] . identifier[files] keyword[import] identifier[InputFile]
identifier[assert_type_or_raise] ( identifier[user_id] , identifier[int] , identifier[parameter_name] = literal[string] )
identifier[assert_type_or_raise] ( identifier[name] , identifier[unicode_type] , identifier[parameter_name] = literal[string] )
identifier[assert_type_or_raise] ( identifier[title] , identifier[unicode_type] , identifier[parameter_name] = literal[string] )
identifier[assert_type_or_raise] ( identifier[png_sticker] ,( identifier[InputFile] , identifier[unicode_type] ), identifier[parameter_name] = literal[string] )
identifier[assert_type_or_raise] ( identifier[emojis] , identifier[unicode_type] , identifier[parameter_name] = literal[string] )
identifier[assert_type_or_raise] ( identifier[contains_masks] , keyword[None] , identifier[bool] , identifier[parameter_name] = literal[string] )
identifier[assert_type_or_raise] ( identifier[mask_position] , keyword[None] , identifier[MaskPosition] , identifier[parameter_name] = literal[string] )
identifier[result] = identifier[self] . identifier[do] ( literal[string] , identifier[user_id] = identifier[user_id] , identifier[name] = identifier[name] , identifier[title] = identifier[title] , identifier[png_sticker] = identifier[png_sticker] , identifier[emojis] = identifier[emojis] , identifier[contains_masks] = identifier[contains_masks] , identifier[mask_position] = identifier[mask_position] )
keyword[if] identifier[self] . identifier[return_python_objects] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[data] = identifier[repr] ( identifier[result] )))
keyword[try] :
keyword[return] identifier[from_array_list] ( identifier[bool] , identifier[result] , identifier[list_level] = literal[int] , identifier[is_builtin] = keyword[True] )
keyword[except] identifier[TgApiParseException] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[exc_info] = keyword[True] )
keyword[raise] identifier[TgApiParseException] ( literal[string] )
keyword[return] identifier[result] | def create_new_sticker_set(self, user_id, name, title, png_sticker, emojis, contains_masks=None, mask_position=None):
"""
Use this method to create new sticker set owned by a user. The bot will be able to edit the created sticker set. Returns True on success.
https://core.telegram.org/bots/api#createnewstickerset
Parameters:
:param user_id: User identifier of created sticker set owner
:type user_id: int
:param name: Short name of sticker set, to be used in t.me/addstickers/ URLs (e.g., animals). Can contain only english letters, digits and underscores. Must begin with a letter, can't contain consecutive underscores and must end in “_by_<bot username>”. <bot_username> is case insensitive. 1-64 characters.
:type name: str|unicode
:param title: Sticker set title, 1-64 characters
:type title: str|unicode
:param png_sticker: Png image with the sticker, must be up to 512 kilobytes in size, dimensions must not exceed 512px, and either width or height must be exactly 512px. Pass a file_id as a String to send a file that already exists on the Telegram servers, pass an HTTP URL as a String for Telegram to get a file from the Internet, or upload a new one using multipart/form-data. More info on Sending Files »
:type png_sticker: pytgbot.api_types.sendable.files.InputFile | str|unicode
:param emojis: One or more emoji corresponding to the sticker
:type emojis: str|unicode
Optional keyword parameters:
:param contains_masks: Pass True, if a set of mask stickers should be created
:type contains_masks: bool
:param mask_position: A JSON-serialized object for position where the mask should be placed on faces
:type mask_position: pytgbot.api_types.receivable.stickers.MaskPosition
Returns:
:return: Returns True on success
:rtype: bool
"""
from pytgbot.api_types.receivable.stickers import MaskPosition
from pytgbot.api_types.sendable.files import InputFile
assert_type_or_raise(user_id, int, parameter_name='user_id')
assert_type_or_raise(name, unicode_type, parameter_name='name')
assert_type_or_raise(title, unicode_type, parameter_name='title')
assert_type_or_raise(png_sticker, (InputFile, unicode_type), parameter_name='png_sticker')
assert_type_or_raise(emojis, unicode_type, parameter_name='emojis')
assert_type_or_raise(contains_masks, None, bool, parameter_name='contains_masks')
assert_type_or_raise(mask_position, None, MaskPosition, parameter_name='mask_position')
result = self.do('createNewStickerSet', user_id=user_id, name=name, title=title, png_sticker=png_sticker, emojis=emojis, contains_masks=contains_masks, mask_position=mask_position)
if self.return_python_objects:
logger.debug('Trying to parse {data}'.format(data=repr(result)))
try:
return from_array_list(bool, result, list_level=0, is_builtin=True) # depends on [control=['try'], data=[]]
except TgApiParseException:
logger.debug('Failed parsing as primitive bool', exc_info=True) # depends on [control=['except'], data=[]]
# end try
# no valid parsing so far
raise TgApiParseException('Could not parse result.') # See debug log for details! # depends on [control=['if'], data=[]]
# end if return_python_objects
return result |
def get_status(self, response, finished=False):
    """Given the stdout from the command returned by :meth:`cmd_status`,
    return one of the status code defined in :mod:`clusterjob.status`.

    :param response: raw stdout produced by the status command
    :param finished: unused here; kept for interface compatibility with
        other backends that distinguish finished jobs
    :return: the mapped status code of the first matching line, or
        ``None`` if no line of `response` matches `self.status_mapping`
    """
    for line in response.split("\n"):
        # Strip once per line instead of twice (membership test + lookup).
        key = line.strip()
        if key in self.status_mapping:
            return self.status_mapping[key]
    return None
constant[Given the stdout from the command returned by :meth:`cmd_status`,
return one of the status code defined in :mod:`clusterjob.status`]
for taget[name[line]] in starred[call[name[response].split, parameter[constant[
]]]] begin[:]
if compare[call[name[line].strip, parameter[]] in name[self].status_mapping] begin[:]
return[call[name[self].status_mapping][call[name[line].strip, parameter[]]]]
return[constant[None]] | keyword[def] identifier[get_status] ( identifier[self] , identifier[response] , identifier[finished] = keyword[False] ):
literal[string]
keyword[for] identifier[line] keyword[in] identifier[response] . identifier[split] ( literal[string] ):
keyword[if] identifier[line] . identifier[strip] () keyword[in] identifier[self] . identifier[status_mapping] :
keyword[return] identifier[self] . identifier[status_mapping] [ identifier[line] . identifier[strip] ()]
keyword[return] keyword[None] | def get_status(self, response, finished=False):
"""Given the stdout from the command returned by :meth:`cmd_status`,
return one of the status code defined in :mod:`clusterjob.status`"""
for line in response.split('\n'):
if line.strip() in self.status_mapping:
return self.status_mapping[line.strip()] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
return None |
def stop_consuming(self):
    """Stop consuming messages.

    :raises AMQPChannelError: Raises if the channel encountered an error.
    :raises AMQPConnectionError: Raises if the connection
                                 encountered an error.

    :return:
    """
    if not self.consumer_tags:
        # Nothing was ever consumed on this channel; nothing to cancel.
        return
    if self.is_closed:
        # Channel already closed: only discard the locally stored tags.
        self.remove_consumer_tag()
        return
    for consumer_tag in self.consumer_tags:
        self.basic.cancel(consumer_tag)
    self.remove_consumer_tag()
constant[Stop consuming messages.
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:return:
]
if <ast.UnaryOp object at 0x7da2054a66b0> begin[:]
return[None]
if <ast.UnaryOp object at 0x7da2054a7670> begin[:]
for taget[name[tag]] in starred[name[self].consumer_tags] begin[:]
call[name[self].basic.cancel, parameter[name[tag]]]
call[name[self].remove_consumer_tag, parameter[]] | keyword[def] identifier[stop_consuming] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[consumer_tags] :
keyword[return]
keyword[if] keyword[not] identifier[self] . identifier[is_closed] :
keyword[for] identifier[tag] keyword[in] identifier[self] . identifier[consumer_tags] :
identifier[self] . identifier[basic] . identifier[cancel] ( identifier[tag] )
identifier[self] . identifier[remove_consumer_tag] () | def stop_consuming(self):
"""Stop consuming messages.
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:return:
"""
if not self.consumer_tags:
return # depends on [control=['if'], data=[]]
if not self.is_closed:
for tag in self.consumer_tags:
self.basic.cancel(tag) # depends on [control=['for'], data=['tag']] # depends on [control=['if'], data=[]]
self.remove_consumer_tag() |
def simple_generate(cls, create, **kwargs):
    """Generate a new instance.

    The instance will be either 'built' or 'created'.

    Args:
        create (bool): whether to 'build' or 'create' the instance.

    Returns:
        object: the generated instance
    """
    # Pick the factory strategy that matches the requested mode.
    if create:
        strategy = enums.CREATE_STRATEGY
    else:
        strategy = enums.BUILD_STRATEGY
    return cls.generate(strategy, **kwargs)
constant[Generate a new instance.
The instance will be either 'built' or 'created'.
Args:
create (bool): whether to 'build' or 'create' the instance.
Returns:
object: the generated instance
]
variable[strategy] assign[=] <ast.IfExp object at 0x7da1b1d5c670>
return[call[name[cls].generate, parameter[name[strategy]]]] | keyword[def] identifier[simple_generate] ( identifier[cls] , identifier[create] ,** identifier[kwargs] ):
literal[string]
identifier[strategy] = identifier[enums] . identifier[CREATE_STRATEGY] keyword[if] identifier[create] keyword[else] identifier[enums] . identifier[BUILD_STRATEGY]
keyword[return] identifier[cls] . identifier[generate] ( identifier[strategy] ,** identifier[kwargs] ) | def simple_generate(cls, create, **kwargs):
"""Generate a new instance.
The instance will be either 'built' or 'created'.
Args:
create (bool): whether to 'build' or 'create' the instance.
Returns:
object: the generated instance
"""
strategy = enums.CREATE_STRATEGY if create else enums.BUILD_STRATEGY
return cls.generate(strategy, **kwargs) |
def apply_to_structure(self, structure):
    """
    Apply the deformation gradient to a structure.

    Args:
        structure (Structure object): the structure object to
            be modified by the deformation
    """
    deformed = structure.copy()
    # New lattice rows are the deformed lattice vectors:
    # (F . L^T)^T, where F is this deformation and L the old lattice matrix.
    transformed = np.transpose(np.dot(self, np.transpose(deformed.lattice.matrix)))
    deformed.lattice = Lattice(transformed)
    return deformed
constant[
Apply the deformation gradient to a structure.
Args:
structure (Structure object): the structure object to
be modified by the deformation
]
variable[def_struct] assign[=] call[name[structure].copy, parameter[]]
variable[old_latt] assign[=] name[def_struct].lattice.matrix
variable[new_latt] assign[=] call[name[np].transpose, parameter[call[name[np].dot, parameter[name[self], call[name[np].transpose, parameter[name[old_latt]]]]]]]
name[def_struct].lattice assign[=] call[name[Lattice], parameter[name[new_latt]]]
return[name[def_struct]] | keyword[def] identifier[apply_to_structure] ( identifier[self] , identifier[structure] ):
literal[string]
identifier[def_struct] = identifier[structure] . identifier[copy] ()
identifier[old_latt] = identifier[def_struct] . identifier[lattice] . identifier[matrix]
identifier[new_latt] = identifier[np] . identifier[transpose] ( identifier[np] . identifier[dot] ( identifier[self] , identifier[np] . identifier[transpose] ( identifier[old_latt] )))
identifier[def_struct] . identifier[lattice] = identifier[Lattice] ( identifier[new_latt] )
keyword[return] identifier[def_struct] | def apply_to_structure(self, structure):
"""
Apply the deformation gradient to a structure.
Args:
structure (Structure object): the structure object to
be modified by the deformation
"""
def_struct = structure.copy()
old_latt = def_struct.lattice.matrix
new_latt = np.transpose(np.dot(self, np.transpose(old_latt)))
def_struct.lattice = Lattice(new_latt)
return def_struct |
def spkw15(handle, body, center, inframe, first, last, segid, epoch, tp, pa, p,
           ecc, j2flg, pv, gm, j2, radius):
    """
    Write an SPK segment of type 15 given a type 15 data record.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw15_c.html

    :param handle: Handle of an SPK file open for writing.
    :type handle: int
    :param body: Body code for ephemeris object.
    :type body: int
    :param center: Body code for the center of motion of the body.
    :type center: int
    :param inframe: The reference frame of the states.
    :type inframe: str
    :param first: First valid time for which states can be computed.
    :type first: float
    :param last: Last valid time for which states can be computed.
    :type last: float
    :param segid: Segment identifier.
    :type segid: str
    :param epoch: Epoch of the periapse.
    :type epoch: float
    :param tp: Trajectory pole vector.
    :type tp: 3-Element Array of floats
    :param pa: Periapsis vector.
    :type pa: 3-Element Array of floats
    :param p: Semi-latus rectum.
    :type p: float
    :param ecc: Eccentricity.
    :type ecc: float
    :param j2flg: J2 processing flag.
    :type j2flg: float
    :param pv: Central body pole vector.
    :type pv: 3-Element Array of floats
    :param gm: Central body GM.
    :type gm: float
    :param j2: Central body J2.
    :type j2: float
    :param radius: Equatorial radius of central body.
    :type radius: float
    """
    # Convert every argument to its CSPICE-compatible ctypes form in the
    # same left-to-right order as before and pass them straight through
    # to the wrapped C routine.
    libspice.spkw15_c(
        ctypes.c_int(handle),
        ctypes.c_int(body),
        ctypes.c_int(center),
        stypes.stringToCharP(inframe),
        ctypes.c_double(first),
        ctypes.c_double(last),
        stypes.stringToCharP(segid),
        ctypes.c_double(epoch),
        stypes.toDoubleVector(tp),
        stypes.toDoubleVector(pa),
        ctypes.c_double(p),
        ctypes.c_double(ecc),
        ctypes.c_double(j2flg),
        stypes.toDoubleVector(pv),
        ctypes.c_double(gm),
        ctypes.c_double(j2),
        ctypes.c_double(radius))
constant[
Write an SPK segment of type 15 given a type 15 data record.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw15_c.html
:param handle: Handle of an SPK file open for writing.
:type handle: int
:param body: Body code for ephemeris object.
:type body: int
:param center: Body code for the center of motion of the body.
:type center: int
:param inframe: The reference frame of the states.
:type inframe: str
:param first: First valid time for which states can be computed.
:type first: float
:param last: Last valid time for which states can be computed.
:type last: float
:param segid: Segment identifier.
:type segid: str
:param epoch: Epoch of the periapse.
:type epoch: float
:param tp: Trajectory pole vector.
:type tp: 3-Element Array of floats
:param pa: Periapsis vector.
:type pa: 3-Element Array of floats
:param p: Semi-latus rectum.
:type p: float
:param ecc: Eccentricity.
:type ecc: float
:param j2flg: J2 processing flag.
:type j2flg: float
:param pv: Central body pole vector.
:type pv: 3-Element Array of floats
:param gm: Central body GM.
:type gm: float
:param j2: Central body J2.
:type j2: float
:param radius: Equatorial radius of central body.
:type radius: float
]
variable[handle] assign[=] call[name[ctypes].c_int, parameter[name[handle]]]
variable[body] assign[=] call[name[ctypes].c_int, parameter[name[body]]]
variable[center] assign[=] call[name[ctypes].c_int, parameter[name[center]]]
variable[inframe] assign[=] call[name[stypes].stringToCharP, parameter[name[inframe]]]
variable[first] assign[=] call[name[ctypes].c_double, parameter[name[first]]]
variable[last] assign[=] call[name[ctypes].c_double, parameter[name[last]]]
variable[segid] assign[=] call[name[stypes].stringToCharP, parameter[name[segid]]]
variable[epoch] assign[=] call[name[ctypes].c_double, parameter[name[epoch]]]
variable[tp] assign[=] call[name[stypes].toDoubleVector, parameter[name[tp]]]
variable[pa] assign[=] call[name[stypes].toDoubleVector, parameter[name[pa]]]
variable[p] assign[=] call[name[ctypes].c_double, parameter[name[p]]]
variable[ecc] assign[=] call[name[ctypes].c_double, parameter[name[ecc]]]
variable[j2flg] assign[=] call[name[ctypes].c_double, parameter[name[j2flg]]]
variable[pv] assign[=] call[name[stypes].toDoubleVector, parameter[name[pv]]]
variable[gm] assign[=] call[name[ctypes].c_double, parameter[name[gm]]]
variable[j2] assign[=] call[name[ctypes].c_double, parameter[name[j2]]]
variable[radius] assign[=] call[name[ctypes].c_double, parameter[name[radius]]]
call[name[libspice].spkw15_c, parameter[name[handle], name[body], name[center], name[inframe], name[first], name[last], name[segid], name[epoch], name[tp], name[pa], name[p], name[ecc], name[j2flg], name[pv], name[gm], name[j2], name[radius]]] | keyword[def] identifier[spkw15] ( identifier[handle] , identifier[body] , identifier[center] , identifier[inframe] , identifier[first] , identifier[last] , identifier[segid] , identifier[epoch] , identifier[tp] , identifier[pa] , identifier[p] ,
identifier[ecc] , identifier[j2flg] , identifier[pv] , identifier[gm] , identifier[j2] , identifier[radius] ):
literal[string]
identifier[handle] = identifier[ctypes] . identifier[c_int] ( identifier[handle] )
identifier[body] = identifier[ctypes] . identifier[c_int] ( identifier[body] )
identifier[center] = identifier[ctypes] . identifier[c_int] ( identifier[center] )
identifier[inframe] = identifier[stypes] . identifier[stringToCharP] ( identifier[inframe] )
identifier[first] = identifier[ctypes] . identifier[c_double] ( identifier[first] )
identifier[last] = identifier[ctypes] . identifier[c_double] ( identifier[last] )
identifier[segid] = identifier[stypes] . identifier[stringToCharP] ( identifier[segid] )
identifier[epoch] = identifier[ctypes] . identifier[c_double] ( identifier[epoch] )
identifier[tp] = identifier[stypes] . identifier[toDoubleVector] ( identifier[tp] )
identifier[pa] = identifier[stypes] . identifier[toDoubleVector] ( identifier[pa] )
identifier[p] = identifier[ctypes] . identifier[c_double] ( identifier[p] )
identifier[ecc] = identifier[ctypes] . identifier[c_double] ( identifier[ecc] )
identifier[j2flg] = identifier[ctypes] . identifier[c_double] ( identifier[j2flg] )
identifier[pv] = identifier[stypes] . identifier[toDoubleVector] ( identifier[pv] )
identifier[gm] = identifier[ctypes] . identifier[c_double] ( identifier[gm] )
identifier[j2] = identifier[ctypes] . identifier[c_double] ( identifier[j2] )
identifier[radius] = identifier[ctypes] . identifier[c_double] ( identifier[radius] )
identifier[libspice] . identifier[spkw15_c] ( identifier[handle] , identifier[body] , identifier[center] , identifier[inframe] , identifier[first] , identifier[last] , identifier[segid] , identifier[epoch] ,
identifier[tp] , identifier[pa] , identifier[p] , identifier[ecc] , identifier[j2flg] , identifier[pv] , identifier[gm] , identifier[j2] , identifier[radius] ) | def spkw15(handle, body, center, inframe, first, last, segid, epoch, tp, pa, p, ecc, j2flg, pv, gm, j2, radius):
"""
Write an SPK segment of type 15 given a type 15 data record.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkw15_c.html
:param handle: Handle of an SPK file open for writing.
:type handle: int
:param body: Body code for ephemeris object.
:type body: int
:param center: Body code for the center of motion of the body.
:type center: int
:param inframe: The reference frame of the states.
:type inframe: str
:param first: First valid time for which states can be computed.
:type first: float
:param last: Last valid time for which states can be computed.
:type last: float
:param segid: Segment identifier.
:type segid: str
:param epoch: Epoch of the periapse.
:type epoch: float
:param tp: Trajectory pole vector.
:type tp: 3-Element Array of floats
:param pa: Periapsis vector.
:type pa: 3-Element Array of floats
:param p: Semi-latus rectum.
:type p: float
:param ecc: Eccentricity.
:type ecc: float
:param j2flg: J2 processing flag.
:type j2flg: float
:param pv: Central body pole vector.
:type pv: 3-Element Array of floats
:param gm: Central body GM.
:type gm: float
:param j2: Central body J2.
:type j2: float
:param radius: Equatorial radius of central body.
:type radius: float
"""
handle = ctypes.c_int(handle)
body = ctypes.c_int(body)
center = ctypes.c_int(center)
inframe = stypes.stringToCharP(inframe)
first = ctypes.c_double(first)
last = ctypes.c_double(last)
segid = stypes.stringToCharP(segid)
epoch = ctypes.c_double(epoch)
tp = stypes.toDoubleVector(tp)
pa = stypes.toDoubleVector(pa)
p = ctypes.c_double(p)
ecc = ctypes.c_double(ecc)
j2flg = ctypes.c_double(j2flg)
pv = stypes.toDoubleVector(pv)
gm = ctypes.c_double(gm)
j2 = ctypes.c_double(j2)
radius = ctypes.c_double(radius)
libspice.spkw15_c(handle, body, center, inframe, first, last, segid, epoch, tp, pa, p, ecc, j2flg, pv, gm, j2, radius) |
def rebuild_auth(self, prepared_request, response):
    """
    When being redirected we should always strip Authorization
    header, since nonce may not be reused as per OAuth spec.
    """
    headers = prepared_request.headers
    if "Authorization" in headers:
        # A redirect may target a different host; never forward the
        # original OAuth signature there.
        headers.pop("Authorization", True)
    # Re-sign the request with our own auth handler.
    prepared_request.prepare_auth(self.auth)
constant[
When being redirected we should always strip Authorization
header, since nonce may not be reused as per OAuth spec.
]
if compare[constant[Authorization] in name[prepared_request].headers] begin[:]
call[name[prepared_request].headers.pop, parameter[constant[Authorization], constant[True]]]
call[name[prepared_request].prepare_auth, parameter[name[self].auth]]
return[None] | keyword[def] identifier[rebuild_auth] ( identifier[self] , identifier[prepared_request] , identifier[response] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[prepared_request] . identifier[headers] :
identifier[prepared_request] . identifier[headers] . identifier[pop] ( literal[string] , keyword[True] )
identifier[prepared_request] . identifier[prepare_auth] ( identifier[self] . identifier[auth] )
keyword[return] | def rebuild_auth(self, prepared_request, response):
"""
When being redirected we should always strip Authorization
header, since nonce may not be reused as per OAuth spec.
"""
if 'Authorization' in prepared_request.headers:
# If we get redirected to a new host, we should strip out
# any authentication headers.
prepared_request.headers.pop('Authorization', True)
prepared_request.prepare_auth(self.auth) # depends on [control=['if'], data=[]]
return |
def _is_gs_folder(cls, result):
"""Return ``True`` if GS standalone folder object.
GS will create a 0 byte ``<FOLDER NAME>_$folder$`` key as a
pseudo-directory place holder if there are no files present.
"""
return (cls.is_key(result) and
result.size == 0 and
result.name.endswith(cls._gs_folder_suffix)) | def function[_is_gs_folder, parameter[cls, result]]:
constant[Return ``True`` if GS standalone folder object.
GS will create a 0 byte ``<FOLDER NAME>_$folder$`` key as a
pseudo-directory place holder if there are no files present.
]
return[<ast.BoolOp object at 0x7da18f00f1c0>] | keyword[def] identifier[_is_gs_folder] ( identifier[cls] , identifier[result] ):
literal[string]
keyword[return] ( identifier[cls] . identifier[is_key] ( identifier[result] ) keyword[and]
identifier[result] . identifier[size] == literal[int] keyword[and]
identifier[result] . identifier[name] . identifier[endswith] ( identifier[cls] . identifier[_gs_folder_suffix] )) | def _is_gs_folder(cls, result):
"""Return ``True`` if GS standalone folder object.
GS will create a 0 byte ``<FOLDER NAME>_$folder$`` key as a
pseudo-directory place holder if there are no files present.
"""
return cls.is_key(result) and result.size == 0 and result.name.endswith(cls._gs_folder_suffix) |
def rotate(obj, angle, **kwargs):
    """ Rotates curves, surfaces or volumes about the chosen axis.

    Keyword Arguments:
        * ``axis``: rotation axis; x, y, z correspond to 0, 1, 2 respectively. *Default: 2*
        * ``inplace``: if False, operation applied to a copy of the object. *Default: False*

    :param obj: input geometry
    :type obj: abstract.SplineGeometry, multi.AbstractGeometry
    :param angle: angle of rotation (in degrees)
    :type angle: float
    :return: rotated geometry object
    """
    # Component index pairs mixed by a rotation about axis 0, 1 and 2
    # respectively; the third component is left untouched. This replaces
    # three near-identical helpers (rotate_x/y/z) with one parameterized
    # one, and fixes the x/y variants which rebuilt control points from
    # zero-filled lists (silently zeroing any extra component) instead of
    # copying them like the z variant did.
    _AXIS_PAIRS = ((1, 2), (0, 2), (0, 1))

    def _rotate_about(ncs, opt, alpha, ax1, ax2):
        # Translate so the rotation origin coincides with the coordinate origin
        translate_vector = linalg.vector_generate(opt, [0.0 for _ in range(ncs.dimension)])
        translate(ncs, translate_vector, inplace=True)
        # Rotate the control points about the axis; hoist cos/sin out of
        # the loop since the angle is constant
        rot = math.radians(alpha)
        cos_r = math.cos(rot)
        sin_r = math.sin(rot)
        new_ctrlpts = [list(pt) for pt in ncs.ctrlpts]
        for idx, pt in enumerate(ncs.ctrlpts):
            new_ctrlpts[idx][ax1] = (pt[ax1] * cos_r) - (pt[ax2] * sin_r)
            new_ctrlpts[idx][ax2] = (pt[ax2] * cos_r) + (pt[ax1] * sin_r)
        ncs.ctrlpts = new_ctrlpts
        # Finally, translate back to the starting location
        translate(ncs, [-tv for tv in translate_vector], inplace=True)

    # Set rotation axis (2-dimensional geometries can only rotate about z)
    axis = 2 if obj.dimension == 2 else int(kwargs.get('axis', 2))
    if not 0 <= axis <= 2:
        raise GeomdlException("Value of the 'axis' argument should be 0, 1 or 2")

    # Operate on a copy or the actual object
    inplace = kwargs.get('inplace', False)
    geom = obj if inplace else copy.deepcopy(obj)

    # Use the evaluated point at the start of the parametric domain of the
    # first geometry as the common rotation origin
    if geom[0].pdimension == 1:
        params = geom[0].domain[0]
    else:
        params = [geom[0].domain[i][0] for i in range(geom[0].pdimension)]
    origin = geom[0].evaluate_single(params)

    # Start rotation
    ax1, ax2 = _AXIS_PAIRS[axis]
    for g in geom:
        _rotate_about(g, origin, angle, ax1, ax2)

    return geom
constant[ Rotates curves, surfaces or volumes about the chosen axis.
Keyword Arguments:
* ``axis``: rotation axis; x, y, z correspond to 0, 1, 2 respectively. *Default: 2*
* ``inplace``: if False, operation applied to a copy of the object. *Default: False*
:param obj: input geometry
:type obj: abstract.SplineGeometry, multi.AbstractGeometry
:param angle: angle of rotation (in degrees)
:type angle: float
:return: rotated geometry object
]
def function[rotate_x, parameter[ncs, opt, alpha]]:
variable[translate_vector] assign[=] call[name[linalg].vector_generate, parameter[name[opt], <ast.ListComp object at 0x7da1b1524940>]]
call[name[translate], parameter[name[ncs], name[translate_vector]]]
variable[rot] assign[=] call[name[math].radians, parameter[name[alpha]]]
variable[new_ctrlpts] assign[=] <ast.ListComp object at 0x7da1b1633070>
for taget[tuple[[<ast.Name object at 0x7da1b1633850>, <ast.Name object at 0x7da1b1632fe0>]]] in starred[call[name[enumerate], parameter[name[ncs].ctrlpts]]] begin[:]
call[call[name[new_ctrlpts]][name[idx]]][constant[0]] assign[=] call[name[pt]][constant[0]]
call[call[name[new_ctrlpts]][name[idx]]][constant[1]] assign[=] binary_operation[binary_operation[call[name[pt]][constant[1]] * call[name[math].cos, parameter[name[rot]]]] - binary_operation[call[name[pt]][constant[2]] * call[name[math].sin, parameter[name[rot]]]]]
call[call[name[new_ctrlpts]][name[idx]]][constant[2]] assign[=] binary_operation[binary_operation[call[name[pt]][constant[2]] * call[name[math].cos, parameter[name[rot]]]] + binary_operation[call[name[pt]][constant[1]] * call[name[math].sin, parameter[name[rot]]]]]
name[ncs].ctrlpts assign[=] name[new_ctrlpts]
call[name[translate], parameter[name[ncs], <ast.ListComp object at 0x7da1b16334f0>]]
def function[rotate_y, parameter[ncs, opt, alpha]]:
variable[translate_vector] assign[=] call[name[linalg].vector_generate, parameter[name[opt], <ast.ListComp object at 0x7da1b16339a0>]]
call[name[translate], parameter[name[ncs], name[translate_vector]]]
variable[rot] assign[=] call[name[math].radians, parameter[name[alpha]]]
variable[new_ctrlpts] assign[=] <ast.ListComp object at 0x7da1b16313f0>
for taget[tuple[[<ast.Name object at 0x7da1b16325c0>, <ast.Name object at 0x7da1b1632e90>]]] in starred[call[name[enumerate], parameter[name[ncs].ctrlpts]]] begin[:]
call[call[name[new_ctrlpts]][name[idx]]][constant[0]] assign[=] binary_operation[binary_operation[call[name[pt]][constant[0]] * call[name[math].cos, parameter[name[rot]]]] - binary_operation[call[name[pt]][constant[2]] * call[name[math].sin, parameter[name[rot]]]]]
call[call[name[new_ctrlpts]][name[idx]]][constant[1]] assign[=] call[name[pt]][constant[1]]
call[call[name[new_ctrlpts]][name[idx]]][constant[2]] assign[=] binary_operation[binary_operation[call[name[pt]][constant[2]] * call[name[math].cos, parameter[name[rot]]]] + binary_operation[call[name[pt]][constant[0]] * call[name[math].sin, parameter[name[rot]]]]]
name[ncs].ctrlpts assign[=] name[new_ctrlpts]
call[name[translate], parameter[name[ncs], <ast.ListComp object at 0x7da1b1721120>]]
def function[rotate_z, parameter[ncs, opt, alpha]]:
variable[translate_vector] assign[=] call[name[linalg].vector_generate, parameter[name[opt], <ast.ListComp object at 0x7da1b180c5b0>]]
call[name[translate], parameter[name[ncs], name[translate_vector]]]
variable[rot] assign[=] call[name[math].radians, parameter[name[alpha]]]
variable[new_ctrlpts] assign[=] <ast.ListComp object at 0x7da1b16def20>
for taget[tuple[[<ast.Name object at 0x7da1b16dcbe0>, <ast.Name object at 0x7da1b16de9b0>]]] in starred[call[name[enumerate], parameter[name[ncs].ctrlpts]]] begin[:]
call[call[name[new_ctrlpts]][name[idx]]][constant[0]] assign[=] binary_operation[binary_operation[call[name[pt]][constant[0]] * call[name[math].cos, parameter[name[rot]]]] - binary_operation[call[name[pt]][constant[1]] * call[name[math].sin, parameter[name[rot]]]]]
call[call[name[new_ctrlpts]][name[idx]]][constant[1]] assign[=] binary_operation[binary_operation[call[name[pt]][constant[1]] * call[name[math].cos, parameter[name[rot]]]] + binary_operation[call[name[pt]][constant[0]] * call[name[math].sin, parameter[name[rot]]]]]
name[ncs].ctrlpts assign[=] name[new_ctrlpts]
call[name[translate], parameter[name[ncs], <ast.ListComp object at 0x7da1b16dc160>]]
variable[axis] assign[=] <ast.IfExp object at 0x7da1b16d13f0>
if <ast.UnaryOp object at 0x7da1b16d0700> begin[:]
<ast.Raise object at 0x7da1b16d0160>
variable[rotfunc] assign[=] tuple[[<ast.Name object at 0x7da1b16d1360>, <ast.Name object at 0x7da1b16d18d0>, <ast.Name object at 0x7da1b16d1450>]]
variable[inplace] assign[=] call[name[kwargs].get, parameter[constant[inplace], constant[False]]]
if <ast.UnaryOp object at 0x7da1b16d0520> begin[:]
variable[geom] assign[=] call[name[copy].deepcopy, parameter[name[obj]]]
if compare[call[name[geom]][constant[0]].pdimension equal[==] constant[1]] begin[:]
variable[params] assign[=] call[call[name[geom]][constant[0]].domain][constant[0]]
variable[origin] assign[=] call[call[name[geom]][constant[0]].evaluate_single, parameter[name[params]]]
for taget[name[g]] in starred[name[geom]] begin[:]
call[call[name[rotfunc]][name[axis]], parameter[name[g], name[origin], name[angle]]]
return[name[geom]] | keyword[def] identifier[rotate] ( identifier[obj] , identifier[angle] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[rotate_x] ( identifier[ncs] , identifier[opt] , identifier[alpha] ):
identifier[translate_vector] = identifier[linalg] . identifier[vector_generate] ( identifier[opt] ,[ literal[int] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[ncs] . identifier[dimension] )])
identifier[translate] ( identifier[ncs] , identifier[translate_vector] , identifier[inplace] = keyword[True] )
identifier[rot] = identifier[math] . identifier[radians] ( identifier[alpha] )
identifier[new_ctrlpts] =[[ literal[int] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[ncs] . identifier[dimension] )] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[len] ( identifier[ncs] . identifier[ctrlpts] ))]
keyword[for] identifier[idx] , identifier[pt] keyword[in] identifier[enumerate] ( identifier[ncs] . identifier[ctrlpts] ):
identifier[new_ctrlpts] [ identifier[idx] ][ literal[int] ]= identifier[pt] [ literal[int] ]
identifier[new_ctrlpts] [ identifier[idx] ][ literal[int] ]=( identifier[pt] [ literal[int] ]* identifier[math] . identifier[cos] ( identifier[rot] ))-( identifier[pt] [ literal[int] ]* identifier[math] . identifier[sin] ( identifier[rot] ))
identifier[new_ctrlpts] [ identifier[idx] ][ literal[int] ]=( identifier[pt] [ literal[int] ]* identifier[math] . identifier[cos] ( identifier[rot] ))+( identifier[pt] [ literal[int] ]* identifier[math] . identifier[sin] ( identifier[rot] ))
identifier[ncs] . identifier[ctrlpts] = identifier[new_ctrlpts]
identifier[translate] ( identifier[ncs] ,[- identifier[tv] keyword[for] identifier[tv] keyword[in] identifier[translate_vector] ], identifier[inplace] = keyword[True] )
keyword[def] identifier[rotate_y] ( identifier[ncs] , identifier[opt] , identifier[alpha] ):
identifier[translate_vector] = identifier[linalg] . identifier[vector_generate] ( identifier[opt] ,[ literal[int] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[ncs] . identifier[dimension] )])
identifier[translate] ( identifier[ncs] , identifier[translate_vector] , identifier[inplace] = keyword[True] )
identifier[rot] = identifier[math] . identifier[radians] ( identifier[alpha] )
identifier[new_ctrlpts] =[[ literal[int] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[ncs] . identifier[dimension] )] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[len] ( identifier[ncs] . identifier[ctrlpts] ))]
keyword[for] identifier[idx] , identifier[pt] keyword[in] identifier[enumerate] ( identifier[ncs] . identifier[ctrlpts] ):
identifier[new_ctrlpts] [ identifier[idx] ][ literal[int] ]=( identifier[pt] [ literal[int] ]* identifier[math] . identifier[cos] ( identifier[rot] ))-( identifier[pt] [ literal[int] ]* identifier[math] . identifier[sin] ( identifier[rot] ))
identifier[new_ctrlpts] [ identifier[idx] ][ literal[int] ]= identifier[pt] [ literal[int] ]
identifier[new_ctrlpts] [ identifier[idx] ][ literal[int] ]=( identifier[pt] [ literal[int] ]* identifier[math] . identifier[cos] ( identifier[rot] ))+( identifier[pt] [ literal[int] ]* identifier[math] . identifier[sin] ( identifier[rot] ))
identifier[ncs] . identifier[ctrlpts] = identifier[new_ctrlpts]
identifier[translate] ( identifier[ncs] ,[- identifier[tv] keyword[for] identifier[tv] keyword[in] identifier[translate_vector] ], identifier[inplace] = keyword[True] )
keyword[def] identifier[rotate_z] ( identifier[ncs] , identifier[opt] , identifier[alpha] ):
identifier[translate_vector] = identifier[linalg] . identifier[vector_generate] ( identifier[opt] ,[ literal[int] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[ncs] . identifier[dimension] )])
identifier[translate] ( identifier[ncs] , identifier[translate_vector] , identifier[inplace] = keyword[True] )
identifier[rot] = identifier[math] . identifier[radians] ( identifier[alpha] )
identifier[new_ctrlpts] =[ identifier[list] ( identifier[ncs] . identifier[ctrlpts] [ identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[ncs] . identifier[ctrlpts] ))]
keyword[for] identifier[idx] , identifier[pt] keyword[in] identifier[enumerate] ( identifier[ncs] . identifier[ctrlpts] ):
identifier[new_ctrlpts] [ identifier[idx] ][ literal[int] ]=( identifier[pt] [ literal[int] ]* identifier[math] . identifier[cos] ( identifier[rot] ))-( identifier[pt] [ literal[int] ]* identifier[math] . identifier[sin] ( identifier[rot] ))
identifier[new_ctrlpts] [ identifier[idx] ][ literal[int] ]=( identifier[pt] [ literal[int] ]* identifier[math] . identifier[cos] ( identifier[rot] ))+( identifier[pt] [ literal[int] ]* identifier[math] . identifier[sin] ( identifier[rot] ))
identifier[ncs] . identifier[ctrlpts] = identifier[new_ctrlpts]
identifier[translate] ( identifier[ncs] ,[- identifier[tv] keyword[for] identifier[tv] keyword[in] identifier[translate_vector] ], identifier[inplace] = keyword[True] )
identifier[axis] = literal[int] keyword[if] identifier[obj] . identifier[dimension] == literal[int] keyword[else] identifier[int] ( identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ))
keyword[if] keyword[not] literal[int] <= identifier[axis] <= literal[int] :
keyword[raise] identifier[GeomdlException] ( literal[string] )
identifier[rotfunc] =( identifier[rotate_x] , identifier[rotate_y] , identifier[rotate_z] )
identifier[inplace] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] )
keyword[if] keyword[not] identifier[inplace] :
identifier[geom] = identifier[copy] . identifier[deepcopy] ( identifier[obj] )
keyword[else] :
identifier[geom] = identifier[obj]
keyword[if] identifier[geom] [ literal[int] ]. identifier[pdimension] == literal[int] :
identifier[params] = identifier[geom] [ literal[int] ]. identifier[domain] [ literal[int] ]
keyword[else] :
identifier[params] =[ identifier[geom] [ literal[int] ]. identifier[domain] [ identifier[i] ][ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[geom] [ literal[int] ]. identifier[pdimension] )]
identifier[origin] = identifier[geom] [ literal[int] ]. identifier[evaluate_single] ( identifier[params] )
keyword[for] identifier[g] keyword[in] identifier[geom] :
identifier[rotfunc] [ identifier[axis] ]( identifier[g] , identifier[origin] , identifier[angle] )
keyword[return] identifier[geom] | def rotate(obj, angle, **kwargs):
""" Rotates curves, surfaces or volumes about the chosen axis.
Keyword Arguments:
* ``axis``: rotation axis; x, y, z correspond to 0, 1, 2 respectively. *Default: 2*
* ``inplace``: if False, operation applied to a copy of the object. *Default: False*
:param obj: input geometry
:type obj: abstract.SplineGeometry, multi.AbstractGeometry
:param angle: angle of rotation (in degrees)
:type angle: float
:return: rotated geometry object
"""
def rotate_x(ncs, opt, alpha):
# Generate translation vector
translate_vector = linalg.vector_generate(opt, [0.0 for _ in range(ncs.dimension)])
# Translate to the origin
translate(ncs, translate_vector, inplace=True)
# Then, rotate about the axis
rot = math.radians(alpha)
new_ctrlpts = [[0.0 for _ in range(ncs.dimension)] for _ in range(len(ncs.ctrlpts))]
for (idx, pt) in enumerate(ncs.ctrlpts):
new_ctrlpts[idx][0] = pt[0]
new_ctrlpts[idx][1] = pt[1] * math.cos(rot) - pt[2] * math.sin(rot)
new_ctrlpts[idx][2] = pt[2] * math.cos(rot) + pt[1] * math.sin(rot) # depends on [control=['for'], data=[]]
ncs.ctrlpts = new_ctrlpts
# Finally, translate back to the starting location
translate(ncs, [-tv for tv in translate_vector], inplace=True)
def rotate_y(ncs, opt, alpha):
# Generate translation vector
translate_vector = linalg.vector_generate(opt, [0.0 for _ in range(ncs.dimension)])
# Translate to the origin
translate(ncs, translate_vector, inplace=True)
# Then, rotate about the axis
rot = math.radians(alpha)
new_ctrlpts = [[0.0 for _ in range(ncs.dimension)] for _ in range(len(ncs.ctrlpts))]
for (idx, pt) in enumerate(ncs.ctrlpts):
new_ctrlpts[idx][0] = pt[0] * math.cos(rot) - pt[2] * math.sin(rot)
new_ctrlpts[idx][1] = pt[1]
new_ctrlpts[idx][2] = pt[2] * math.cos(rot) + pt[0] * math.sin(rot) # depends on [control=['for'], data=[]]
ncs.ctrlpts = new_ctrlpts
# Finally, translate back to the starting location
translate(ncs, [-tv for tv in translate_vector], inplace=True)
def rotate_z(ncs, opt, alpha):
# Generate translation vector
translate_vector = linalg.vector_generate(opt, [0.0 for _ in range(ncs.dimension)])
# Translate to the origin
translate(ncs, translate_vector, inplace=True)
# Then, rotate about the axis
rot = math.radians(alpha)
new_ctrlpts = [list(ncs.ctrlpts[i]) for i in range(len(ncs.ctrlpts))]
for (idx, pt) in enumerate(ncs.ctrlpts):
new_ctrlpts[idx][0] = pt[0] * math.cos(rot) - pt[1] * math.sin(rot)
new_ctrlpts[idx][1] = pt[1] * math.cos(rot) + pt[0] * math.sin(rot) # depends on [control=['for'], data=[]]
ncs.ctrlpts = new_ctrlpts
# Finally, translate back to the starting location
translate(ncs, [-tv for tv in translate_vector], inplace=True)
# Set rotation axis
axis = 2 if obj.dimension == 2 else int(kwargs.get('axis', 2))
if not 0 <= axis <= 2:
raise GeomdlException("Value of the 'axis' argument should be 0, 1 or 2") # depends on [control=['if'], data=[]]
rotfunc = (rotate_x, rotate_y, rotate_z)
# Operate on a copy or the actual object
inplace = kwargs.get('inplace', False)
if not inplace:
geom = copy.deepcopy(obj) # depends on [control=['if'], data=[]]
else:
geom = obj
# Set a single origin
if geom[0].pdimension == 1:
params = geom[0].domain[0] # depends on [control=['if'], data=[]]
else:
params = [geom[0].domain[i][0] for i in range(geom[0].pdimension)]
origin = geom[0].evaluate_single(params)
# Start rotation
for g in geom:
rotfunc[axis](g, origin, angle) # depends on [control=['for'], data=['g']]
return geom |
def namespace_uri(self, name):
    """Look up the namespace URI bound to a named attribute.

    :param string name: the name of an attribute to look up.
    :return: the namespace URI associated with the named attribute,
        or None when no such attribute exists.
    """
    attr_node = self.adapter.get_node_attribute_node(self.impl_element, name)
    if attr_node is not None:
        return self.adapter.get_node_namespace_uri(attr_node)
    return None
constant[
:param string name: the name of an attribute to look up.
:return: the namespace URI associated with the named attribute,
or None.
]
variable[a_node] assign[=] call[name[self].adapter.get_node_attribute_node, parameter[name[self].impl_element, name[name]]]
if compare[name[a_node] is constant[None]] begin[:]
return[constant[None]]
return[call[name[self].adapter.get_node_namespace_uri, parameter[name[a_node]]]] | keyword[def] identifier[namespace_uri] ( identifier[self] , identifier[name] ):
literal[string]
identifier[a_node] = identifier[self] . identifier[adapter] . identifier[get_node_attribute_node] ( identifier[self] . identifier[impl_element] , identifier[name] )
keyword[if] identifier[a_node] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[self] . identifier[adapter] . identifier[get_node_namespace_uri] ( identifier[a_node] ) | def namespace_uri(self, name):
"""
:param string name: the name of an attribute to look up.
:return: the namespace URI associated with the named attribute,
or None.
"""
a_node = self.adapter.get_node_attribute_node(self.impl_element, name)
if a_node is None:
return None # depends on [control=['if'], data=[]]
return self.adapter.get_node_namespace_uri(a_node) |
def addCompletedJob(self, job, wallTime):
    """Record the wall-time of a finished job in the running averages.

    Updates the per-job-name average runtime and completion count, plus the
    overall average runtime across all completed jobs, so the scaler can
    factor recent completions into how many nodes the cluster needs.

    :param toil.job.JobNode job: the completed job (only ``jobName`` is read)
    :param int wallTime: wall-time taken to complete the job, in seconds
    """
    name = job.jobName
    if name in self.jobNameToAvgRuntime:
        completed = self.jobNameToNumCompleted[name]
        # Fold the new sample into the running per-name average.
        self.jobNameToAvgRuntime[name] = (
            float(self.jobNameToAvgRuntime[name] * completed + wallTime)
            / (completed + 1))
        self.jobNameToNumCompleted[name] = completed + 1
    else:
        # First completion observed for this job name.
        self.jobNameToAvgRuntime[name] = wallTime
        self.jobNameToNumCompleted[name] = 1
    self.totalJobsCompleted += 1
    # Fold the new sample into the global running average.
    self.totalAvgRuntime = (
        float(self.totalAvgRuntime * (self.totalJobsCompleted - 1) + wallTime)
        / self.totalJobsCompleted)
constant[
Adds the shape of a completed job to the queue, allowing the scalar to use the last N
completed jobs in factoring how many nodes are required in the cluster.
:param toil.job.JobNode job: The memory, core and disk requirements of the completed job
:param int wallTime: The wall-time taken to complete the job in seconds.
]
if compare[name[job].jobName in name[self].jobNameToAvgRuntime] begin[:]
variable[prevAvg] assign[=] call[name[self].jobNameToAvgRuntime][name[job].jobName]
variable[prevNum] assign[=] call[name[self].jobNameToNumCompleted][name[job].jobName]
call[name[self].jobNameToAvgRuntime][name[job].jobName] assign[=] binary_operation[call[name[float], parameter[binary_operation[binary_operation[name[prevAvg] * name[prevNum]] + name[wallTime]]]] / binary_operation[name[prevNum] + constant[1]]]
<ast.AugAssign object at 0x7da1b1eeeb00>
<ast.AugAssign object at 0x7da18f58d900>
name[self].totalAvgRuntime assign[=] binary_operation[call[name[float], parameter[binary_operation[binary_operation[name[self].totalAvgRuntime * binary_operation[name[self].totalJobsCompleted - constant[1]]] + name[wallTime]]]] / name[self].totalJobsCompleted] | keyword[def] identifier[addCompletedJob] ( identifier[self] , identifier[job] , identifier[wallTime] ):
literal[string]
keyword[if] identifier[job] . identifier[jobName] keyword[in] identifier[self] . identifier[jobNameToAvgRuntime] :
identifier[prevAvg] = identifier[self] . identifier[jobNameToAvgRuntime] [ identifier[job] . identifier[jobName] ]
identifier[prevNum] = identifier[self] . identifier[jobNameToNumCompleted] [ identifier[job] . identifier[jobName] ]
identifier[self] . identifier[jobNameToAvgRuntime] [ identifier[job] . identifier[jobName] ]= identifier[float] ( identifier[prevAvg] * identifier[prevNum] + identifier[wallTime] )/( identifier[prevNum] + literal[int] )
identifier[self] . identifier[jobNameToNumCompleted] [ identifier[job] . identifier[jobName] ]+= literal[int]
keyword[else] :
identifier[self] . identifier[jobNameToAvgRuntime] [ identifier[job] . identifier[jobName] ]= identifier[wallTime]
identifier[self] . identifier[jobNameToNumCompleted] [ identifier[job] . identifier[jobName] ]= literal[int]
identifier[self] . identifier[totalJobsCompleted] += literal[int]
identifier[self] . identifier[totalAvgRuntime] = identifier[float] ( identifier[self] . identifier[totalAvgRuntime] *( identifier[self] . identifier[totalJobsCompleted] - literal[int] )+ identifier[wallTime] )/ identifier[self] . identifier[totalJobsCompleted] | def addCompletedJob(self, job, wallTime):
"""
Adds the shape of a completed job to the queue, allowing the scalar to use the last N
completed jobs in factoring how many nodes are required in the cluster.
:param toil.job.JobNode job: The memory, core and disk requirements of the completed job
:param int wallTime: The wall-time taken to complete the job in seconds.
"""
#Adjust average runtimes to include this job.
if job.jobName in self.jobNameToAvgRuntime:
prevAvg = self.jobNameToAvgRuntime[job.jobName]
prevNum = self.jobNameToNumCompleted[job.jobName]
self.jobNameToAvgRuntime[job.jobName] = float(prevAvg * prevNum + wallTime) / (prevNum + 1)
self.jobNameToNumCompleted[job.jobName] += 1 # depends on [control=['if'], data=[]]
else:
self.jobNameToAvgRuntime[job.jobName] = wallTime
self.jobNameToNumCompleted[job.jobName] = 1
self.totalJobsCompleted += 1
self.totalAvgRuntime = float(self.totalAvgRuntime * (self.totalJobsCompleted - 1) + wallTime) / self.totalJobsCompleted |
def plot(self, type="roc", server=False):
    """
    Produce the desired metric plot (currently only a ROC curve).

    Reads ``self._metric_json["AUC"]``, ``self.fprs`` and ``self.tprs``
    to draw the curve and annotate the area under it.

    :param type: the type of metric plot (currently, only ROC supported).
    :param server: if True, generate plot inline using matplotlib's "Agg" backend.
    :returns: None
    """
    # TODO: add more types (i.e. cutoffs)
    assert_is_type(type, "roc")
    # check for matplotlib. exit if absent.
    # NOTE(review): the `imp` module is deprecated (removed in Python 3.12)
    # and matplotlib dropped the `warn` kwarg from matplotlib.use() in newer
    # releases -- confirm against the supported Python/matplotlib versions.
    try:
        imp.find_module('matplotlib')
        import matplotlib
        # Non-interactive backend so the figure renders without a display.
        if server: matplotlib.use('Agg', warn=False)
        import matplotlib.pyplot as plt
    except ImportError:
        # matplotlib is an optional dependency; degrade to a message.
        print("matplotlib is required for this function!")
        return
    if type == "roc":
        plt.xlabel('False Positive Rate (FPR)')
        plt.ylabel('True Positive Rate (TPR)')
        plt.title('ROC Curve')
        # Annotate the plot area with the AUC value from the metric JSON.
        plt.text(0.5, 0.5, r'AUC={0:.4f}'.format(self._metric_json["AUC"]))
        plt.plot(self.fprs, self.tprs, 'b--')
        plt.axis([0, 1, 0, 1])
if not server: plt.show() | def function[plot, parameter[self, type, server]]:
constant[
Produce the desired metric plot.
:param type: the type of metric plot (currently, only ROC supported).
:param server: if True, generate plot inline using matplotlib's "Agg" backend.
:returns: None
]
call[name[assert_is_type], parameter[name[type], constant[roc]]]
<ast.Try object at 0x7da18dc9b880>
if compare[name[type] equal[==] constant[roc]] begin[:]
call[name[plt].xlabel, parameter[constant[False Positive Rate (FPR)]]]
call[name[plt].ylabel, parameter[constant[True Positive Rate (TPR)]]]
call[name[plt].title, parameter[constant[ROC Curve]]]
call[name[plt].text, parameter[constant[0.5], constant[0.5], call[constant[AUC={0:.4f}].format, parameter[call[name[self]._metric_json][constant[AUC]]]]]]
call[name[plt].plot, parameter[name[self].fprs, name[self].tprs, constant[b--]]]
call[name[plt].axis, parameter[list[[<ast.Constant object at 0x7da18fe90490>, <ast.Constant object at 0x7da18fe91750>, <ast.Constant object at 0x7da18fe909d0>, <ast.Constant object at 0x7da18fe91de0>]]]]
if <ast.UnaryOp object at 0x7da18fe93280> begin[:]
call[name[plt].show, parameter[]] | keyword[def] identifier[plot] ( identifier[self] , identifier[type] = literal[string] , identifier[server] = keyword[False] ):
literal[string]
identifier[assert_is_type] ( identifier[type] , literal[string] )
keyword[try] :
identifier[imp] . identifier[find_module] ( literal[string] )
keyword[import] identifier[matplotlib]
keyword[if] identifier[server] : identifier[matplotlib] . identifier[use] ( literal[string] , identifier[warn] = keyword[False] )
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
keyword[except] identifier[ImportError] :
identifier[print] ( literal[string] )
keyword[return]
keyword[if] identifier[type] == literal[string] :
identifier[plt] . identifier[xlabel] ( literal[string] )
identifier[plt] . identifier[ylabel] ( literal[string] )
identifier[plt] . identifier[title] ( literal[string] )
identifier[plt] . identifier[text] ( literal[int] , literal[int] , literal[string] . identifier[format] ( identifier[self] . identifier[_metric_json] [ literal[string] ]))
identifier[plt] . identifier[plot] ( identifier[self] . identifier[fprs] , identifier[self] . identifier[tprs] , literal[string] )
identifier[plt] . identifier[axis] ([ literal[int] , literal[int] , literal[int] , literal[int] ])
keyword[if] keyword[not] identifier[server] : identifier[plt] . identifier[show] () | def plot(self, type='roc', server=False):
"""
Produce the desired metric plot.
:param type: the type of metric plot (currently, only ROC supported).
:param server: if True, generate plot inline using matplotlib's "Agg" backend.
:returns: None
"""
# TODO: add more types (i.e. cutoffs)
assert_is_type(type, 'roc')
# check for matplotlib. exit if absent.
try:
imp.find_module('matplotlib')
import matplotlib
if server:
matplotlib.use('Agg', warn=False) # depends on [control=['if'], data=[]]
import matplotlib.pyplot as plt # depends on [control=['try'], data=[]]
except ImportError:
print('matplotlib is required for this function!')
return # depends on [control=['except'], data=[]]
if type == 'roc':
plt.xlabel('False Positive Rate (FPR)')
plt.ylabel('True Positive Rate (TPR)')
plt.title('ROC Curve')
plt.text(0.5, 0.5, 'AUC={0:.4f}'.format(self._metric_json['AUC']))
plt.plot(self.fprs, self.tprs, 'b--')
plt.axis([0, 1, 0, 1])
if not server:
plt.show() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def butter_filter(cutoff, fs, order=5, btype='low'):
    '''Design a digital Butterworth IIR filter.

    Adapted from http://stackoverflow.com/a/25192640/943773

    Args
    ----
    cutoff: float
        Cutoff frequency in Hz where the filter should separate signals.
    fs: float
        Sampling frequency in Hz.
    order: int
        Order of the filter (Default 5).
    btype: str
        Type of filter to create: 'low' for a low-pass filter, 'high' for
        a high-pass filter (Default 'low').

    Returns
    -------
    b: ndarray
        Numerator polynomials of the IIR butter filter.
    a: ndarray
        Denominator polynomials of the IIR butter filter.
    '''
    import scipy.signal

    # scipy.signal.butter expects the cutoff normalised to the Nyquist
    # frequency for digital (analog=False) designs.
    nyquist = 0.5 * fs
    return scipy.signal.butter(order, cutoff / nyquist, btype=btype,
                               analog=False)
constant[Create a digital butter fileter with cutoff frequency in Hz
Args
----
cutoff: float
Cutoff frequency where filter should separate signals
fs: float
sampling frequency
btype: str
Type of filter type to create. 'low' creates a low-frequency filter and
'high' creates a high-frequency filter (Default 'low).
Returns
-------
b: ndarray
Numerator polynomials of the IIR butter filter
a: ndarray
Denominator polynomials of the IIR butter filter
Notes
-----
This function was adapted from the following StackOverflow answer:
http://stackoverflow.com/a/25192640/943773
]
import module[scipy.signal]
variable[nyq] assign[=] binary_operation[constant[0.5] * name[fs]]
variable[normal_cutoff] assign[=] binary_operation[name[cutoff] / name[nyq]]
<ast.Tuple object at 0x7da1b143f460> assign[=] call[name[scipy].signal.butter, parameter[name[order], name[normal_cutoff]]]
return[tuple[[<ast.Name object at 0x7da1b143f7c0>, <ast.Name object at 0x7da1b143ed40>]]] | keyword[def] identifier[butter_filter] ( identifier[cutoff] , identifier[fs] , identifier[order] = literal[int] , identifier[btype] = literal[string] ):
literal[string]
keyword[import] identifier[scipy] . identifier[signal]
identifier[nyq] = literal[int] * identifier[fs]
identifier[normal_cutoff] = identifier[cutoff] / identifier[nyq]
identifier[b] , identifier[a] = identifier[scipy] . identifier[signal] . identifier[butter] ( identifier[order] , identifier[normal_cutoff] , identifier[btype] = identifier[btype] , identifier[analog] = keyword[False] )
keyword[return] identifier[b] , identifier[a] | def butter_filter(cutoff, fs, order=5, btype='low'):
"""Create a digital butter fileter with cutoff frequency in Hz
Args
----
cutoff: float
Cutoff frequency where filter should separate signals
fs: float
sampling frequency
btype: str
Type of filter type to create. 'low' creates a low-frequency filter and
'high' creates a high-frequency filter (Default 'low).
Returns
-------
b: ndarray
Numerator polynomials of the IIR butter filter
a: ndarray
Denominator polynomials of the IIR butter filter
Notes
-----
This function was adapted from the following StackOverflow answer:
http://stackoverflow.com/a/25192640/943773
"""
import scipy.signal
nyq = 0.5 * fs
normal_cutoff = cutoff / nyq
(b, a) = scipy.signal.butter(order, normal_cutoff, btype=btype, analog=False)
return (b, a) |
def rectangle_props(event):
    """Extract geometry and label information from a picked rectangle artist.

    Parameters
    -----------
    event : PickEvent
        The pick event to process.

    Returns
    --------
    A dict with keys:
        `width` : The width of the rectangle
        `height` : The height of the rectangle
        `left` : The minimum x-coordinate of the rectangle
        `right` : The maximum x-coordinate of the rectangle
        `bottom` : The minimum y-coordinate of the rectangle
        `top` : The maximum y-coordinate of the rectangle
        `xcenter` : The mean x-coordinate of the rectangle
        `ycenter` : The mean y-coordinate of the rectangle
        `label` : The label for the rectangle or None
    """
    rect = event.artist
    width = rect.get_width()
    height = rect.get_height()
    left, bottom = rect.xy
    label = rect.get_label()
    # Placeholder labels fall back to the datacursor-supplied one, if any.
    if label is None or label.startswith('_nolegend'):
        label = getattr(rect, '_mpldatacursor_label', None)
    return dict(
        width=width,
        height=height,
        left=left,
        right=left + width,
        bottom=bottom,
        top=bottom + height,
        xcenter=left + 0.5 * width,
        ycenter=bottom + 0.5 * height,
        label=label,
    )
constant[
Returns the width, height, left, and bottom of a rectangle artist.
Parameters
-----------
event : PickEvent
The pick event to process
Returns
--------
A dict with keys:
`width` : The width of the rectangle
`height` : The height of the rectangle
`left` : The minimum x-coordinate of the rectangle
`right` : The maximum x-coordinate of the rectangle
`bottom` : The minimum y-coordinate of the rectangle
`top` : The maximum y-coordinate of the rectangle
`xcenter` : The mean x-coordinate of the rectangle
`ycenter` : The mean y-coordinate of the rectangle
`label` : The label for the rectangle or None
]
variable[artist] assign[=] name[event].artist
<ast.Tuple object at 0x7da1b0efbc10> assign[=] tuple[[<ast.Call object at 0x7da1b0ef9540>, <ast.Call object at 0x7da1b0efaad0>]]
<ast.Tuple object at 0x7da1b0efa9e0> assign[=] name[artist].xy
<ast.Tuple object at 0x7da1b0efb2e0> assign[=] tuple[[<ast.BinOp object at 0x7da1b0ef9360>, <ast.BinOp object at 0x7da1b0ef9e70>]]
variable[xcenter] assign[=] binary_operation[name[left] + binary_operation[constant[0.5] * name[width]]]
variable[ycenter] assign[=] binary_operation[name[bottom] + binary_operation[constant[0.5] * name[height]]]
variable[label] assign[=] call[name[artist].get_label, parameter[]]
if <ast.BoolOp object at 0x7da1b0ef96c0> begin[:]
<ast.Try object at 0x7da1b0efb310>
return[call[name[dict], parameter[]]] | keyword[def] identifier[rectangle_props] ( identifier[event] ):
literal[string]
identifier[artist] = identifier[event] . identifier[artist]
identifier[width] , identifier[height] = identifier[artist] . identifier[get_width] (), identifier[artist] . identifier[get_height] ()
identifier[left] , identifier[bottom] = identifier[artist] . identifier[xy]
identifier[right] , identifier[top] = identifier[left] + identifier[width] , identifier[bottom] + identifier[height]
identifier[xcenter] = identifier[left] + literal[int] * identifier[width]
identifier[ycenter] = identifier[bottom] + literal[int] * identifier[height]
identifier[label] = identifier[artist] . identifier[get_label] ()
keyword[if] identifier[label] keyword[is] keyword[None] keyword[or] identifier[label] . identifier[startswith] ( literal[string] ):
keyword[try] :
identifier[label] = identifier[artist] . identifier[_mpldatacursor_label]
keyword[except] identifier[AttributeError] :
identifier[label] = keyword[None]
keyword[return] identifier[dict] ( identifier[width] = identifier[width] , identifier[height] = identifier[height] , identifier[left] = identifier[left] , identifier[bottom] = identifier[bottom] ,
identifier[label] = identifier[label] , identifier[right] = identifier[right] , identifier[top] = identifier[top] ,
identifier[xcenter] = identifier[xcenter] , identifier[ycenter] = identifier[ycenter] ) | def rectangle_props(event):
"""
Returns the width, height, left, and bottom of a rectangle artist.
Parameters
-----------
event : PickEvent
The pick event to process
Returns
--------
A dict with keys:
`width` : The width of the rectangle
`height` : The height of the rectangle
`left` : The minimum x-coordinate of the rectangle
`right` : The maximum x-coordinate of the rectangle
`bottom` : The minimum y-coordinate of the rectangle
`top` : The maximum y-coordinate of the rectangle
`xcenter` : The mean x-coordinate of the rectangle
`ycenter` : The mean y-coordinate of the rectangle
`label` : The label for the rectangle or None
"""
artist = event.artist
(width, height) = (artist.get_width(), artist.get_height())
(left, bottom) = artist.xy
(right, top) = (left + width, bottom + height)
xcenter = left + 0.5 * width
ycenter = bottom + 0.5 * height
label = artist.get_label()
if label is None or label.startswith('_nolegend'):
try:
label = artist._mpldatacursor_label # depends on [control=['try'], data=[]]
except AttributeError:
label = None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return dict(width=width, height=height, left=left, bottom=bottom, label=label, right=right, top=top, xcenter=xcenter, ycenter=ycenter) |
def export_public_keys(self, identities):
    """Export SSH public keys from the device.

    Opens the device once, reads the raw public key for each identity and
    converts it to its exported SSH text representation.
    """
    exported = []
    with self.device:
        for identity in identities:
            raw = self.device.pubkey(identity=identity)
            verifying_key = formats.decompress_pubkey(
                pubkey=raw, curve_name=identity.curve_name)
            exported.append(formats.export_public_key(
                vk=verifying_key, label=identity.to_string()))
    return exported
constant[Export SSH public keys from the device.]
variable[public_keys] assign[=] list[[]]
with name[self].device begin[:]
for taget[name[i]] in starred[name[identities]] begin[:]
variable[pubkey] assign[=] call[name[self].device.pubkey, parameter[]]
variable[vk] assign[=] call[name[formats].decompress_pubkey, parameter[]]
variable[public_key] assign[=] call[name[formats].export_public_key, parameter[]]
call[name[public_keys].append, parameter[name[public_key]]]
return[name[public_keys]] | keyword[def] identifier[export_public_keys] ( identifier[self] , identifier[identities] ):
literal[string]
identifier[public_keys] =[]
keyword[with] identifier[self] . identifier[device] :
keyword[for] identifier[i] keyword[in] identifier[identities] :
identifier[pubkey] = identifier[self] . identifier[device] . identifier[pubkey] ( identifier[identity] = identifier[i] )
identifier[vk] = identifier[formats] . identifier[decompress_pubkey] ( identifier[pubkey] = identifier[pubkey] ,
identifier[curve_name] = identifier[i] . identifier[curve_name] )
identifier[public_key] = identifier[formats] . identifier[export_public_key] ( identifier[vk] = identifier[vk] ,
identifier[label] = identifier[i] . identifier[to_string] ())
identifier[public_keys] . identifier[append] ( identifier[public_key] )
keyword[return] identifier[public_keys] | def export_public_keys(self, identities):
"""Export SSH public keys from the device."""
public_keys = []
with self.device:
for i in identities:
pubkey = self.device.pubkey(identity=i)
vk = formats.decompress_pubkey(pubkey=pubkey, curve_name=i.curve_name)
public_key = formats.export_public_key(vk=vk, label=i.to_string())
public_keys.append(public_key) # depends on [control=['for'], data=['i']] # depends on [control=['with'], data=[]]
return public_keys |
def openStream(self, source):
    """Produce a readable stream from *source*.

    Anything that already exposes a ``read`` method is treated as a
    file-like object and returned untouched; any other value (e.g. a
    plain string) is wrapped in an in-memory ``StringIO`` buffer.
    """
    # Duck-typed check: presence of ``read`` marks a file-like object.
    if hasattr(source, 'read'):
        return source
    return StringIO(source)
constant[Produces a file object from source.
source can be either a file object, local filename or a string.
]
if call[name[hasattr], parameter[name[source], constant[read]]] begin[:]
variable[stream] assign[=] name[source]
return[name[stream]] | keyword[def] identifier[openStream] ( identifier[self] , identifier[source] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[source] , literal[string] ):
identifier[stream] = identifier[source]
keyword[else] :
identifier[stream] = identifier[StringIO] ( identifier[source] )
keyword[return] identifier[stream] | def openStream(self, source):
"""Produces a file object from source.
source can be either a file object, local filename or a string.
"""
# Already a file object
if hasattr(source, 'read'):
stream = source # depends on [control=['if'], data=[]]
else:
stream = StringIO(source)
return stream |
def warn_message(self, message, fh=None, prefix="[warn]:", suffix="..."):
    """Print a warning-type message.

    When the target handle is ``sys.stdout`` the message is rendered in
    yellow via ``termcolor``; otherwise it is written verbatim to the
    handle.

    :param str message: message to print
    :param file fh: output file handle, defaults to ``sys.stdout``
    :param str prefix: text prepended to the message, default ``[warn]:``
    :param str suffix: text appended to the message, default ``...``
    :return: None
    """
    out = fh or sys.stdout
    text = "".join((prefix, message, suffix))
    if out is sys.stdout:
        # Interactive stdout gets a colorized line.
        termcolor.cprint(text, color="yellow")
    else:
        out.write(text)
constant[
print warn type message,
if file handle is `sys.stdout`, print color message
:param str message: message to print
:param file fh: file handle,default is `sys.stdout`
:param str prefix: message prefix,default is `[warn]`
:param str suffix: message suffix ,default is `...`
:return: None
]
variable[msg] assign[=] binary_operation[binary_operation[name[prefix] + name[message]] + name[suffix]]
variable[fh] assign[=] <ast.BoolOp object at 0x7da20e954130>
if compare[name[fh] is name[sys].stdout] begin[:]
call[name[termcolor].cprint, parameter[name[msg]]]
pass | keyword[def] identifier[warn_message] ( identifier[self] , identifier[message] , identifier[fh] = keyword[None] , identifier[prefix] = literal[string] , identifier[suffix] = literal[string] ):
literal[string]
identifier[msg] = identifier[prefix] + identifier[message] + identifier[suffix]
identifier[fh] = identifier[fh] keyword[or] identifier[sys] . identifier[stdout]
keyword[if] identifier[fh] keyword[is] identifier[sys] . identifier[stdout] :
identifier[termcolor] . identifier[cprint] ( identifier[msg] , identifier[color] = literal[string] )
keyword[else] :
identifier[fh] . identifier[write] ( identifier[msg] )
keyword[pass] | def warn_message(self, message, fh=None, prefix='[warn]:', suffix='...'):
"""
print warn type message,
if file handle is `sys.stdout`, print color message
:param str message: message to print
:param file fh: file handle,default is `sys.stdout`
:param str prefix: message prefix,default is `[warn]`
:param str suffix: message suffix ,default is `...`
:return: None
"""
msg = prefix + message + suffix
fh = fh or sys.stdout
if fh is sys.stdout:
termcolor.cprint(msg, color='yellow') # depends on [control=['if'], data=[]]
else:
fh.write(msg)
pass |
def DecodeMessages(self, response_comms):
    """Extract and verify server message.

    Decrypts a ClientCommunication payload, authenticates it, and unpacks
    the contained message list. Cipher objects are cached keyed on their
    encrypted form so repeated communications from the same peer skip the
    expensive signature verification (the HMAC is still re-checked every
    time).

    Args:
      response_comms: A ClientCommunication rdfvalue

    Returns:
      list of messages and the CN where they came from.

    Raises:
      DecryptionError: If the message failed to decrypt properly.
    """
    # Have we seen this cipher before?
    # cipher_verified tracks whether the cipher's *signature* has been
    # validated; it feeds into VerifyMessageSignature below.
    cipher_verified = False
    try:
        cipher = self.encrypted_cipher_cache.Get(response_comms.encrypted_cipher)
        stats_collector_instance.Get().IncrementCounter(
            "grr_encrypted_cipher_cache", fields=["hits"])
        # Even though we have seen this encrypted cipher already, we should still
        # make sure that all the other fields are sane and verify the HMAC.
        cipher.VerifyReceivedHMAC(response_comms)
        cipher_verified = True
        # If we have the cipher in the cache, we know the source and
        # should have a corresponding public key.
        source = cipher.GetSource()
        remote_public_key = self._GetRemotePublicKey(source)
    except KeyError:
        # Cache miss: rebuild the cipher from the wire format and verify it
        # from scratch before it may be cached.
        stats_collector_instance.Get().IncrementCounter(
            "grr_encrypted_cipher_cache", fields=["misses"])
        cipher = ReceivedCipher(response_comms, self.private_key)
        source = cipher.GetSource()
        try:
            remote_public_key = self._GetRemotePublicKey(source)
            if cipher.VerifyCipherSignature(remote_public_key):
                # At this point we know this cipher is legit, we can cache it.
                self.encrypted_cipher_cache.Put(response_comms.encrypted_cipher,
                                                cipher)
                cipher_verified = True
        except UnknownClientCertError:
            # We don't know who we are talking to.
            remote_public_key = None
    # Decrypt the message with the per packet IV.
    plain = cipher.Decrypt(response_comms.encrypted, response_comms.packet_iv)
    try:
        packed_message_list = rdf_flows.PackedMessageList.FromSerializedString(
            plain)
    except rdfvalue.DecodeError as e:
        # Garbage after decryption means the payload (or key) was bad.
        raise DecryptionError(e)
    message_list = self.DecompressMessageList(packed_message_list)
    # Are these messages authenticated?
    # pyformat: disable
    auth_state = self.VerifyMessageSignature(
        response_comms,
        packed_message_list,
        cipher,
        cipher_verified,
        response_comms.api_version,
        remote_public_key)
    # pyformat: enable
    # Mark messages as authenticated and where they came from.
    for msg in message_list.job:
        msg.auth_state = auth_state
        msg.source = cipher.cipher_metadata.source
    return (message_list.job, cipher.cipher_metadata.source,
            packed_message_list.timestamp)
constant[Extract and verify server message.
Args:
response_comms: A ClientCommunication rdfvalue
Returns:
list of messages and the CN where they came from.
Raises:
DecryptionError: If the message failed to decrypt properly.
]
variable[cipher_verified] assign[=] constant[False]
<ast.Try object at 0x7da1b1b87df0>
variable[plain] assign[=] call[name[cipher].Decrypt, parameter[name[response_comms].encrypted, name[response_comms].packet_iv]]
<ast.Try object at 0x7da18fe90820>
variable[message_list] assign[=] call[name[self].DecompressMessageList, parameter[name[packed_message_list]]]
variable[auth_state] assign[=] call[name[self].VerifyMessageSignature, parameter[name[response_comms], name[packed_message_list], name[cipher], name[cipher_verified], name[response_comms].api_version, name[remote_public_key]]]
for taget[name[msg]] in starred[name[message_list].job] begin[:]
name[msg].auth_state assign[=] name[auth_state]
name[msg].source assign[=] name[cipher].cipher_metadata.source
return[tuple[[<ast.Attribute object at 0x7da18fe90d00>, <ast.Attribute object at 0x7da18fe91120>, <ast.Attribute object at 0x7da18fe930a0>]]] | keyword[def] identifier[DecodeMessages] ( identifier[self] , identifier[response_comms] ):
literal[string]
identifier[cipher_verified] = keyword[False]
keyword[try] :
identifier[cipher] = identifier[self] . identifier[encrypted_cipher_cache] . identifier[Get] ( identifier[response_comms] . identifier[encrypted_cipher] )
identifier[stats_collector_instance] . identifier[Get] (). identifier[IncrementCounter] (
literal[string] , identifier[fields] =[ literal[string] ])
identifier[cipher] . identifier[VerifyReceivedHMAC] ( identifier[response_comms] )
identifier[cipher_verified] = keyword[True]
identifier[source] = identifier[cipher] . identifier[GetSource] ()
identifier[remote_public_key] = identifier[self] . identifier[_GetRemotePublicKey] ( identifier[source] )
keyword[except] identifier[KeyError] :
identifier[stats_collector_instance] . identifier[Get] (). identifier[IncrementCounter] (
literal[string] , identifier[fields] =[ literal[string] ])
identifier[cipher] = identifier[ReceivedCipher] ( identifier[response_comms] , identifier[self] . identifier[private_key] )
identifier[source] = identifier[cipher] . identifier[GetSource] ()
keyword[try] :
identifier[remote_public_key] = identifier[self] . identifier[_GetRemotePublicKey] ( identifier[source] )
keyword[if] identifier[cipher] . identifier[VerifyCipherSignature] ( identifier[remote_public_key] ):
identifier[self] . identifier[encrypted_cipher_cache] . identifier[Put] ( identifier[response_comms] . identifier[encrypted_cipher] ,
identifier[cipher] )
identifier[cipher_verified] = keyword[True]
keyword[except] identifier[UnknownClientCertError] :
identifier[remote_public_key] = keyword[None]
identifier[plain] = identifier[cipher] . identifier[Decrypt] ( identifier[response_comms] . identifier[encrypted] , identifier[response_comms] . identifier[packet_iv] )
keyword[try] :
identifier[packed_message_list] = identifier[rdf_flows] . identifier[PackedMessageList] . identifier[FromSerializedString] (
identifier[plain] )
keyword[except] identifier[rdfvalue] . identifier[DecodeError] keyword[as] identifier[e] :
keyword[raise] identifier[DecryptionError] ( identifier[e] )
identifier[message_list] = identifier[self] . identifier[DecompressMessageList] ( identifier[packed_message_list] )
identifier[auth_state] = identifier[self] . identifier[VerifyMessageSignature] (
identifier[response_comms] ,
identifier[packed_message_list] ,
identifier[cipher] ,
identifier[cipher_verified] ,
identifier[response_comms] . identifier[api_version] ,
identifier[remote_public_key] )
keyword[for] identifier[msg] keyword[in] identifier[message_list] . identifier[job] :
identifier[msg] . identifier[auth_state] = identifier[auth_state]
identifier[msg] . identifier[source] = identifier[cipher] . identifier[cipher_metadata] . identifier[source]
keyword[return] ( identifier[message_list] . identifier[job] , identifier[cipher] . identifier[cipher_metadata] . identifier[source] ,
identifier[packed_message_list] . identifier[timestamp] ) | def DecodeMessages(self, response_comms):
"""Extract and verify server message.
Args:
response_comms: A ClientCommunication rdfvalue
Returns:
list of messages and the CN where they came from.
Raises:
DecryptionError: If the message failed to decrypt properly.
"""
# Have we seen this cipher before?
cipher_verified = False
try:
cipher = self.encrypted_cipher_cache.Get(response_comms.encrypted_cipher)
stats_collector_instance.Get().IncrementCounter('grr_encrypted_cipher_cache', fields=['hits'])
# Even though we have seen this encrypted cipher already, we should still
# make sure that all the other fields are sane and verify the HMAC.
cipher.VerifyReceivedHMAC(response_comms)
cipher_verified = True
# If we have the cipher in the cache, we know the source and
# should have a corresponding public key.
source = cipher.GetSource()
remote_public_key = self._GetRemotePublicKey(source) # depends on [control=['try'], data=[]]
except KeyError:
stats_collector_instance.Get().IncrementCounter('grr_encrypted_cipher_cache', fields=['misses'])
cipher = ReceivedCipher(response_comms, self.private_key)
source = cipher.GetSource()
try:
remote_public_key = self._GetRemotePublicKey(source)
if cipher.VerifyCipherSignature(remote_public_key):
# At this point we know this cipher is legit, we can cache it.
self.encrypted_cipher_cache.Put(response_comms.encrypted_cipher, cipher)
cipher_verified = True # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except UnknownClientCertError:
# We don't know who we are talking to.
remote_public_key = None # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
# Decrypt the message with the per packet IV.
plain = cipher.Decrypt(response_comms.encrypted, response_comms.packet_iv)
try:
packed_message_list = rdf_flows.PackedMessageList.FromSerializedString(plain) # depends on [control=['try'], data=[]]
except rdfvalue.DecodeError as e:
raise DecryptionError(e) # depends on [control=['except'], data=['e']]
message_list = self.DecompressMessageList(packed_message_list)
# Are these messages authenticated?
# pyformat: disable
auth_state = self.VerifyMessageSignature(response_comms, packed_message_list, cipher, cipher_verified, response_comms.api_version, remote_public_key)
# pyformat: enable
# Mark messages as authenticated and where they came from.
for msg in message_list.job:
msg.auth_state = auth_state
msg.source = cipher.cipher_metadata.source # depends on [control=['for'], data=['msg']]
return (message_list.job, cipher.cipher_metadata.source, packed_message_list.timestamp) |
def _rhat_ufunc(ary):
    """Ufunc for computing split R-hat over the trailing (chain, draw) axes.

    This can be used on an xarray Dataset, using
    `xr.apply_ufunc(_rhat_ufunc, ..., input_core_dims=(('chain', 'draw'),))`

    Parameters
    ----------
    ary : numpy.ndarray
        Array whose last two dimensions are assumed to be (chain, draw),
        matching the ``input_core_dims`` hint above.

    Returns
    -------
    numpy.ndarray
        Split R-hat values, one per combination of the leading indices.
    """
    # NOTE: the original docstring claimed this computed "effective sample
    # size" and referenced `_neff_ufunc`; the body delegates to
    # `_get_split_rhat`, so it is the split R-hat diagnostic.
    target = np.empty(ary.shape[:-2])
    # np.ndindex walks every index tuple of the leading dims, handing each
    # (chain, draw) sub-array to the scalar diagnostic.
    for idx in np.ndindex(target.shape):
        target[idx] = _get_split_rhat(ary[idx])
    return target
constant[Ufunc for computing effective sample size.
This can be used on an xarray Dataset, using
`xr.apply_ufunc(_neff_ufunc, ..., input_core_dims=(('chain', 'draw'),))
]
variable[target] assign[=] call[name[np].empty, parameter[call[name[ary].shape][<ast.Slice object at 0x7da1b1c7d4b0>]]]
for taget[name[idx]] in starred[call[name[np].ndindex, parameter[name[target].shape]]] begin[:]
call[name[target]][name[idx]] assign[=] call[name[_get_split_rhat], parameter[call[name[ary]][name[idx]]]]
return[name[target]] | keyword[def] identifier[_rhat_ufunc] ( identifier[ary] ):
literal[string]
identifier[target] = identifier[np] . identifier[empty] ( identifier[ary] . identifier[shape] [:- literal[int] ])
keyword[for] identifier[idx] keyword[in] identifier[np] . identifier[ndindex] ( identifier[target] . identifier[shape] ):
identifier[target] [ identifier[idx] ]= identifier[_get_split_rhat] ( identifier[ary] [ identifier[idx] ])
keyword[return] identifier[target] | def _rhat_ufunc(ary):
"""Ufunc for computing effective sample size.
This can be used on an xarray Dataset, using
`xr.apply_ufunc(_neff_ufunc, ..., input_core_dims=(('chain', 'draw'),))
"""
target = np.empty(ary.shape[:-2])
for idx in np.ndindex(target.shape):
target[idx] = _get_split_rhat(ary[idx]) # depends on [control=['for'], data=['idx']]
return target |
def _build_package_finder(options, index_urls, session):
    """
    Create a package finder appropriate to this list command.
    """
    # Pull the finder configuration out of the options mapping first so the
    # constructor call itself stays flat.
    finder_kwargs = {
        "find_links": options.get("find_links"),
        "index_urls": index_urls,
        "allow_all_prereleases": options.get("pre"),
        "trusted_hosts": options.get("trusted_hosts"),
        "session": session,
    }
    return PackageFinder(**finder_kwargs)
constant[
Create a package finder appropriate to this list command.
]
return[call[name[PackageFinder], parameter[]]] | keyword[def] identifier[_build_package_finder] ( identifier[options] , identifier[index_urls] , identifier[session] ):
literal[string]
keyword[return] identifier[PackageFinder] (
identifier[find_links] = identifier[options] . identifier[get] ( literal[string] ),
identifier[index_urls] = identifier[index_urls] ,
identifier[allow_all_prereleases] = identifier[options] . identifier[get] ( literal[string] ),
identifier[trusted_hosts] = identifier[options] . identifier[get] ( literal[string] ),
identifier[session] = identifier[session] ,
) | def _build_package_finder(options, index_urls, session):
"""
Create a package finder appropriate to this list command.
"""
return PackageFinder(find_links=options.get('find_links'), index_urls=index_urls, allow_all_prereleases=options.get('pre'), trusted_hosts=options.get('trusted_hosts'), session=session) |
def _apply_columns(self, func):
    """
    Get new SparseDataFrame applying func to each columns
    """
    # Transform every column series through ``func`` before rebuilding the
    # frame with the same index/columns/fill-value metadata.
    transformed = {}
    for name, column in self.items():
        transformed[name] = func(column)
    result = self._constructor(
        data=transformed, index=self.index, columns=self.columns,
        default_fill_value=self.default_fill_value)
    return result.__finalize__(self)
constant[
Get new SparseDataFrame applying func to each columns
]
variable[new_data] assign[=] <ast.DictComp object at 0x7da18fe92140>
return[call[call[name[self]._constructor, parameter[]].__finalize__, parameter[name[self]]]] | keyword[def] identifier[_apply_columns] ( identifier[self] , identifier[func] ):
literal[string]
identifier[new_data] ={ identifier[col] : identifier[func] ( identifier[series] )
keyword[for] identifier[col] , identifier[series] keyword[in] identifier[self] . identifier[items] ()}
keyword[return] identifier[self] . identifier[_constructor] (
identifier[data] = identifier[new_data] , identifier[index] = identifier[self] . identifier[index] , identifier[columns] = identifier[self] . identifier[columns] ,
identifier[default_fill_value] = identifier[self] . identifier[default_fill_value] ). identifier[__finalize__] ( identifier[self] ) | def _apply_columns(self, func):
"""
Get new SparseDataFrame applying func to each columns
"""
new_data = {col: func(series) for (col, series) in self.items()}
return self._constructor(data=new_data, index=self.index, columns=self.columns, default_fill_value=self.default_fill_value).__finalize__(self) |
def parse_JSON(self, JSON_string):
    """
    Parses an *COIndex* instance out of raw JSON data. Only certain
    properties of the data are used: if these properties are not found or
    cannot be parsed, an error is issued.

    :param JSON_string: a raw JSON string
    :type JSON_string: str
    :returns: an *COIndex* instance or ``None`` if no data is available
    :raises: *ParseResponseError* if it is impossible to find or parse the
        data needed to build the result, *APIResponseError* if the JSON
        string embeds an HTTP status error
    """
    if JSON_string is None:
        raise parse_response_error.ParseResponseError('JSON data is None')
    parsed = json.loads(JSON_string)
    try:
        # Reference time: normalize ISO8601 ("Z"/"T") before conversion.
        iso_time = parsed['time'].replace('Z', '+00').replace('T', ' ')
        reference_time = timeformatutils._ISO8601_to_UNIXtime(iso_time)
        # Reception time is simply "now".
        reception_time = timeutils.now('unix')
        # Geographic location of the measurement.
        coords = parsed['location']
        lon = float(coords['longitude'])
        lat = float(coords['latitude'])
        place = location.Location(None, lon, lat, None)
        # The CO samples payload.
        co_samples = parsed['data']
    except KeyError:
        raise parse_response_error.ParseResponseError(
            __name__ + ': impossible to parse COIndex')
    return coindex.COIndex(reference_time, place, None, co_samples,
                           reception_time)
reception_time) | def function[parse_JSON, parameter[self, JSON_string]]:
constant[
Parses an *COIndex* instance out of raw JSON data. Only certain
properties of the data are used: if these properties are not found or
cannot be parsed, an error is issued.
:param JSON_string: a raw JSON string
:type JSON_string: str
:returns: an *COIndex* instance or ``None`` if no data is available
:raises: *ParseResponseError* if it is impossible to find or parse the
data needed to build the result, *APIResponseError* if the JSON
string embeds an HTTP status error
]
if compare[name[JSON_string] is constant[None]] begin[:]
<ast.Raise object at 0x7da2045663e0>
variable[d] assign[=] call[name[json].loads, parameter[name[JSON_string]]]
<ast.Try object at 0x7da204566e60>
return[call[name[coindex].COIndex, parameter[name[reference_time], name[place], constant[None], name[co_samples], name[reception_time]]]] | keyword[def] identifier[parse_JSON] ( identifier[self] , identifier[JSON_string] ):
literal[string]
keyword[if] identifier[JSON_string] keyword[is] keyword[None] :
keyword[raise] identifier[parse_response_error] . identifier[ParseResponseError] ( literal[string] )
identifier[d] = identifier[json] . identifier[loads] ( identifier[JSON_string] )
keyword[try] :
identifier[t] = identifier[d] [ literal[string] ]. identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[reference_time] = identifier[timeformatutils] . identifier[_ISO8601_to_UNIXtime] ( identifier[t] )
identifier[reception_time] = identifier[timeutils] . identifier[now] ( literal[string] )
identifier[lon] = identifier[float] ( identifier[d] [ literal[string] ][ literal[string] ])
identifier[lat] = identifier[float] ( identifier[d] [ literal[string] ][ literal[string] ])
identifier[place] = identifier[location] . identifier[Location] ( keyword[None] , identifier[lon] , identifier[lat] , keyword[None] )
identifier[co_samples] = identifier[d] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[parse_response_error] . identifier[ParseResponseError] (
literal[string] . identifier[join] ([ identifier[__name__] , literal[string] ]))
keyword[return] identifier[coindex] . identifier[COIndex] ( identifier[reference_time] , identifier[place] , keyword[None] , identifier[co_samples] ,
identifier[reception_time] ) | def parse_JSON(self, JSON_string):
"""
Parses an *COIndex* instance out of raw JSON data. Only certain
properties of the data are used: if these properties are not found or
cannot be parsed, an error is issued.
:param JSON_string: a raw JSON string
:type JSON_string: str
:returns: an *COIndex* instance or ``None`` if no data is available
:raises: *ParseResponseError* if it is impossible to find or parse the
data needed to build the result, *APIResponseError* if the JSON
string embeds an HTTP status error
"""
if JSON_string is None:
raise parse_response_error.ParseResponseError('JSON data is None') # depends on [control=['if'], data=[]]
d = json.loads(JSON_string)
try:
# -- reference time (strip away Z and T on ISO8601 format)
t = d['time'].replace('Z', '+00').replace('T', ' ')
reference_time = timeformatutils._ISO8601_to_UNIXtime(t)
# -- reception time (now)
reception_time = timeutils.now('unix')
# -- location
lon = float(d['location']['longitude'])
lat = float(d['location']['latitude'])
place = location.Location(None, lon, lat, None)
# -- CO samples
co_samples = d['data'] # depends on [control=['try'], data=[]]
except KeyError:
raise parse_response_error.ParseResponseError(''.join([__name__, ': impossible to parse COIndex'])) # depends on [control=['except'], data=[]]
return coindex.COIndex(reference_time, place, None, co_samples, reception_time) |
def delay_for(
    self,
    wait: typing.Union[int, float],
    identifier: typing.Any,
) -> bool:
    """Push back execution of a previously deferred call.

    Args:
        wait (typing.Union[int, float]): Number of seconds that must pass
            before the callback becomes available for execution. All given
            values must be positive.
        identifier (typing.Any): The identifier returned from a call to
            defer or defer_for.

    Returns:
        bool: True if the call is delayed. False if the identifier is
            invalid or if the deferred call is already executed.
    """
    # Interface hook only: concrete schedulers must override this method.
    raise NotImplementedError()
constant[Defer the execution of a function for some number of seconds.
Args:
wait (typing.Union[int, float]): A numeric value that represents
the number of seconds that must pass before the callback
becomes available for execution. All given values must be
positive.
identifier (typing.Any): The identifier returned from a call
to defer or defer_for.
Returns:
bool: True if the call is delayed. False if the identifier is
invalid or if the deferred call is already executed.
]
<ast.Raise object at 0x7da2046223e0> | keyword[def] identifier[delay_for] (
identifier[self] ,
identifier[wait] : identifier[typing] . identifier[Union] [ identifier[int] , identifier[float] ],
identifier[identifier] : identifier[typing] . identifier[Any] ,
)-> identifier[bool] :
literal[string]
keyword[raise] identifier[NotImplementedError] () | def delay_for(self, wait: typing.Union[int, float], identifier: typing.Any) -> bool:
"""Defer the execution of a function for some number of seconds.
Args:
wait (typing.Union[int, float]): A numeric value that represents
the number of seconds that must pass before the callback
becomes available for execution. All given values must be
positive.
identifier (typing.Any): The identifier returned from a call
to defer or defer_for.
Returns:
bool: True if the call is delayed. False if the identifier is
invalid or if the deferred call is already executed.
"""
raise NotImplementedError() |
def _verify(self, path_prefix=None):
    """Verifies that this schema's doc spec is valid and makes sense."""
    for name, field_spec in self.doc_spec.iteritems():
        field_path = self._append_path(path_prefix, name)
        # Only the standard dict-based field spec is accepted; anything
        # else is a malformed schema.
        if not isinstance(field_spec, dict):
            raise SchemaFormatException("Invalid field definition for {}", field_path)
        self._verify_field_spec(field_spec, field_path)
constant[Verifies that this schema's doc spec is valid and makes sense.]
for taget[tuple[[<ast.Name object at 0x7da1b11c26b0>, <ast.Name object at 0x7da1b11c0340>]]] in starred[call[name[self].doc_spec.iteritems, parameter[]]] begin[:]
variable[path] assign[=] call[name[self]._append_path, parameter[name[path_prefix], name[field]]]
if call[name[isinstance], parameter[name[spec], name[dict]]] begin[:]
call[name[self]._verify_field_spec, parameter[name[spec], name[path]]] | keyword[def] identifier[_verify] ( identifier[self] , identifier[path_prefix] = keyword[None] ):
literal[string]
keyword[for] identifier[field] , identifier[spec] keyword[in] identifier[self] . identifier[doc_spec] . identifier[iteritems] ():
identifier[path] = identifier[self] . identifier[_append_path] ( identifier[path_prefix] , identifier[field] )
keyword[if] identifier[isinstance] ( identifier[spec] , identifier[dict] ):
identifier[self] . identifier[_verify_field_spec] ( identifier[spec] , identifier[path] )
keyword[else] :
keyword[raise] identifier[SchemaFormatException] ( literal[string] , identifier[path] ) | def _verify(self, path_prefix=None):
"""Verifies that this schema's doc spec is valid and makes sense."""
for (field, spec) in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# Standard dict-based spec
if isinstance(spec, dict):
self._verify_field_spec(spec, path) # depends on [control=['if'], data=[]]
else:
raise SchemaFormatException('Invalid field definition for {}', path) # depends on [control=['for'], data=[]] |
def eval(self, valuation=None, trace=None):
    """
    This method should be always called by subclasses

    :param valuation
    :param trace
    :return: Arguments evaluation
    """
    evaluated = []
    for arg in self.args:
        if isinstance(arg, (Function, IPredicate)):
            # Nested callables are evaluated recursively and frozen into
            # constants.
            evaluated.append(Constant(arg.eval(valuation=valuation, trace=trace)))
        elif isinstance(arg, Variable):
            # Resolve the variable against the first matching binding.
            binding = next((v for v in valuation if str(v.var) == arg.name), None)
            if binding is None:
                raise Exception("IPredicate instantiation failed : missing vars")
            evaluated.append(Constant(str(binding.value.name)))
        else:
            # Plain values are wrapped as-is.
            evaluated.append(Constant(arg))
    return evaluated
constant[
This method should be always called by subclasses
:param valuation
:param trace
:return: Arguments evaluation
]
variable[args2] assign[=] list[[]]
for taget[name[a]] in starred[name[self].args] begin[:]
if <ast.BoolOp object at 0x7da20cabf490> begin[:]
call[name[args2].append, parameter[call[name[Constant], parameter[call[name[a].eval, parameter[]]]]]]
return[name[args2]] | keyword[def] identifier[eval] ( identifier[self] , identifier[valuation] = keyword[None] , identifier[trace] = keyword[None] ):
literal[string]
identifier[args2] =[]
keyword[for] identifier[a] keyword[in] identifier[self] . identifier[args] :
keyword[if] identifier[isinstance] ( identifier[a] , identifier[Function] ) keyword[or] identifier[isinstance] ( identifier[a] , identifier[IPredicate] ):
identifier[args2] . identifier[append] ( identifier[Constant] ( identifier[a] . identifier[eval] ( identifier[valuation] = identifier[valuation] , identifier[trace] = identifier[trace] )))
keyword[elif] identifier[isinstance] ( identifier[a] , identifier[Variable] ):
identifier[found] = keyword[False]
keyword[for] identifier[v] keyword[in] identifier[valuation] :
keyword[if] identifier[str] ( identifier[v] . identifier[var] )== identifier[a] . identifier[name] :
identifier[args2] . identifier[append] ( identifier[Constant] ( identifier[str] ( identifier[v] . identifier[value] . identifier[name] )))
identifier[found] = keyword[True]
keyword[break]
keyword[if] keyword[not] identifier[found] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[else] :
identifier[args2] . identifier[append] ( identifier[Constant] ( identifier[a] ))
keyword[return] identifier[args2] | def eval(self, valuation=None, trace=None):
"""
This method should be always called by subclasses
:param valuation
:param trace
:return: Arguments evaluation
"""
args2 = []
for a in self.args:
if isinstance(a, Function) or isinstance(a, IPredicate):
args2.append(Constant(a.eval(valuation=valuation, trace=trace))) # depends on [control=['if'], data=[]]
elif isinstance(a, Variable):
found = False
for v in valuation:
if str(v.var) == a.name:
args2.append(Constant(str(v.value.name)))
found = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['v']]
if not found:
raise Exception('IPredicate instantiation failed : missing vars') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
args2.append(Constant(a)) # depends on [control=['for'], data=['a']]
return args2 |
def replace_read(self):
    """
    Replaces all READ (type - 3) states to a PUSH (type - 1) and a POP (type - 2).
    The actual state is replaced with the PUSH, and a new POP is created.
    """
    for state in self.statediag.values():
        if state.type != 3:  # only READ states are rewritten
            continue
        # Turn the READ into a PUSH whose single transition targets the
        # freshly generated POP state.
        state.type = 1
        new_target = self._generate_state(state.trans)
        state.sym = new_target
        state.trans = {new_target: [0]}
    # Append the POP states created above, numbering them after the
    # existing states.
    next_id = len(self.statediag) + 1
    for pending in self.toadd:
        self.statediag[next_id] = pending
        next_id += 1
    return self.statediag
constant[
Replaces all READ (type - 3) states to a PUSH (type - 1) and a POP (type - 2).
The actual state is replaced with the PUSH, and a new POP is created.
]
for taget[name[statenum]] in starred[name[self].statediag] begin[:]
variable[state] assign[=] call[name[self].statediag][name[statenum]]
if compare[name[state].type equal[==] constant[3]] begin[:]
name[state].type assign[=] constant[1]
variable[destination_and_symbol] assign[=] call[name[self]._generate_state, parameter[name[state].trans]]
name[state].sym assign[=] name[destination_and_symbol]
name[state].trans assign[=] dictionary[[], []]
call[name[state].trans][name[destination_and_symbol]] assign[=] list[[<ast.Constant object at 0x7da18dc9b670>]]
variable[statenumber_identifier] assign[=] binary_operation[call[name[len], parameter[name[self].statediag]] + constant[1]]
for taget[name[state]] in starred[name[self].toadd] begin[:]
call[name[self].statediag][name[statenumber_identifier]] assign[=] name[state]
variable[statenumber_identifier] assign[=] binary_operation[name[statenumber_identifier] + constant[1]]
return[name[self].statediag] | keyword[def] identifier[replace_read] ( identifier[self] ):
literal[string]
keyword[for] identifier[statenum] keyword[in] identifier[self] . identifier[statediag] :
identifier[state] = identifier[self] . identifier[statediag] [ identifier[statenum] ]
keyword[if] identifier[state] . identifier[type] == literal[int] :
identifier[state] . identifier[type] = literal[int]
identifier[destination_and_symbol] = identifier[self] . identifier[_generate_state] ( identifier[state] . identifier[trans] )
identifier[state] . identifier[sym] = identifier[destination_and_symbol]
identifier[state] . identifier[trans] ={}
identifier[state] . identifier[trans] [ identifier[destination_and_symbol] ]=[ literal[int] ]
identifier[statenumber_identifier] = identifier[len] ( identifier[self] . identifier[statediag] )+ literal[int]
keyword[for] identifier[state] keyword[in] identifier[self] . identifier[toadd] :
identifier[self] . identifier[statediag] [ identifier[statenumber_identifier] ]= identifier[state]
identifier[statenumber_identifier] = identifier[statenumber_identifier] + literal[int]
keyword[return] identifier[self] . identifier[statediag] | def replace_read(self):
"""
Replaces all READ (type - 3) states to a PUSH (type - 1) and a POP (type - 2).
The actual state is replaced with the PUSH, and a new POP is created.
"""
for statenum in self.statediag:
state = self.statediag[statenum]
if state.type == 3: # READ state
state.type = 1
destination_and_symbol = self._generate_state(state.trans)
state.sym = destination_and_symbol
state.trans = {}
state.trans[destination_and_symbol] = [0] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['statenum']]
statenumber_identifier = len(self.statediag) + 1
for state in self.toadd:
self.statediag[statenumber_identifier] = state
statenumber_identifier = statenumber_identifier + 1 # depends on [control=['for'], data=['state']]
return self.statediag |
def get_maintenance_window(self, id, **kwargs):  # noqa: E501
    """Get a specific maintenance window  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_maintenance_window(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :return: ResponseContainerMaintenanceWindow
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized body,
    # never the full (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: hand back the worker thread immediately.
        return self.get_maintenance_window_with_http_info(id, **kwargs)  # noqa: E501
    # Synchronous mode: block until the response body is available.
    return self.get_maintenance_window_with_http_info(id, **kwargs)  # noqa: E501
constant[Get a specific maintenance window # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_maintenance_window(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerMaintenanceWindow
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].get_maintenance_window_with_http_info, parameter[name[id]]]] | keyword[def] identifier[get_maintenance_window] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[get_maintenance_window_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[get_maintenance_window_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[return] identifier[data] | def get_maintenance_window(self, id, **kwargs): # noqa: E501
'Get a specific maintenance window # noqa: E501\n\n # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_maintenance_window(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: (required)\n :return: ResponseContainerMaintenanceWindow\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_maintenance_window_with_http_info(id, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.get_maintenance_window_with_http_info(id, **kwargs) # noqa: E501
return data |
def getUserPassword(host='www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca'):
    """Return a (username, password) pair for *host*.

    Credentials are read from ``$HOME/.netrc`` when that file is readable
    and contains an entry for *host*; otherwise the user is prompted
    interactively (the password prompt does not echo).

    :param host: host name to look up in .netrc.
    :return: tuple of (username, password) strings.
    """
    # Original docstring opened with four quotes, leaving a stray '"'
    # as the first character of the documentation text; fixed above.
    auth = False
    # Only consult .netrc when it is actually readable.
    if os.access(os.path.join(os.environ.get('HOME', '/'), ".netrc"), os.R_OK):
        auth = netrc.netrc().authenticators(host)
    if not auth:
        # No stored credentials: fall back to an interactive prompt.
        sys.stdout.write("CADC Username: ")
        username = sys.stdin.readline().strip('\n')
        password = getpass.getpass().strip('\n')
    else:
        # netrc authenticators() returns (login, account, password).
        username = auth[0]
        password = auth[2]
    return (username, password)
constant["Getting the username/password for host from .netrc filie ]
if call[name[os].access, parameter[call[name[os].path.join, parameter[call[name[os].environ.get, parameter[constant[HOME], constant[/]]], constant[.netrc]]], name[os].R_OK]] begin[:]
variable[auth] assign[=] call[call[name[netrc].netrc, parameter[]].authenticators, parameter[name[host]]]
if <ast.UnaryOp object at 0x7da1b1a3d450> begin[:]
call[name[sys].stdout.write, parameter[constant[CADC Username: ]]]
variable[username] assign[=] call[call[name[sys].stdin.readline, parameter[]].strip, parameter[constant[
]]]
variable[password] assign[=] call[call[name[getpass].getpass, parameter[]].strip, parameter[constant[
]]]
return[tuple[[<ast.Name object at 0x7da1b1a3f2e0>, <ast.Name object at 0x7da1b1a3f850>]]] | keyword[def] identifier[getUserPassword] ( identifier[host] = literal[string] ):
literal[string]
keyword[if] identifier[os] . identifier[access] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[string] ), literal[string] ), identifier[os] . identifier[R_OK] ):
identifier[auth] = identifier[netrc] . identifier[netrc] (). identifier[authenticators] ( identifier[host] )
keyword[else] :
identifier[auth] = keyword[False]
keyword[if] keyword[not] identifier[auth] :
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] )
identifier[username] = identifier[sys] . identifier[stdin] . identifier[readline] (). identifier[strip] ( literal[string] )
identifier[password] = identifier[getpass] . identifier[getpass] (). identifier[strip] ( literal[string] )
keyword[else] :
identifier[username] = identifier[auth] [ literal[int] ]
identifier[password] = identifier[auth] [ literal[int] ]
keyword[return] ( identifier[username] , identifier[password] ) | def getUserPassword(host='www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca'):
""""Getting the username/password for host from .netrc filie """
if os.access(os.path.join(os.environ.get('HOME', '/'), '.netrc'), os.R_OK):
auth = netrc.netrc().authenticators(host) # depends on [control=['if'], data=[]]
else:
auth = False
if not auth:
sys.stdout.write('CADC Username: ')
username = sys.stdin.readline().strip('\n')
password = getpass.getpass().strip('\n') # depends on [control=['if'], data=[]]
else:
username = auth[0]
password = auth[2]
return (username, password) |
def save_lines(lines, filename):
    """Write each string in *lines* to *filename*, one string per line.

    Args:
        lines: An array of strings that will be saved as individual lines.
        filename: Path to the output file.
    """
    # Join once, then write in a single call; no trailing newline is added.
    content = '\n'.join(lines)
    with open(filename, 'w', encoding='utf-8') as out_file:
        out_file.write(content)
constant[
Save an array of lines to a file.
Args:
lines: An array of strings that will be saved as individual lines.
filename: Path to the output file.
]
with call[name[open], parameter[name[filename], constant[w]]] begin[:]
call[name[f].write, parameter[call[constant[
].join, parameter[name[lines]]]]] | keyword[def] identifier[save_lines] ( identifier[lines] , identifier[filename] ):
literal[string]
keyword[with] identifier[open] ( identifier[filename] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( literal[string] . identifier[join] ( identifier[lines] )) | def save_lines(lines, filename):
"""
Save an array of lines to a file.
Args:
lines: An array of strings that will be saved as individual lines.
filename: Path to the output file.
"""
with open(filename, 'w', encoding='utf-8') as f:
f.write('\n'.join(lines)) # depends on [control=['with'], data=['f']] |
def add_task_db(self, task):
    """Insert a new task record into the database.

    :param task: a 14-element sequence matching the columns of the
        ``tasks`` table, bound positionally to the placeholders.
    """
    # Parameterized query: values are bound by the driver, never
    # interpolated into the SQL string.
    sql = 'INSERT INTO tasks VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?)'
    # The cursor's return value was previously stored in an unused
    # local ('req'); it is intentionally discarded now.
    self.cursor.execute(sql, task)
    self.check_commit()
constant[向数据库中写入一个新的任务记录]
variable[sql] assign[=] constant[INSERT INTO tasks VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?)]
variable[req] assign[=] call[name[self].cursor.execute, parameter[name[sql], name[task]]]
call[name[self].check_commit, parameter[]] | keyword[def] identifier[add_task_db] ( identifier[self] , identifier[task] ):
literal[string]
identifier[sql] = literal[string]
identifier[req] = identifier[self] . identifier[cursor] . identifier[execute] ( identifier[sql] , identifier[task] )
identifier[self] . identifier[check_commit] () | def add_task_db(self, task):
"""向数据库中写入一个新的任务记录"""
sql = 'INSERT INTO tasks VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?)'
req = self.cursor.execute(sql, task)
self.check_commit() |
def similarity_by_path(sense1: "wn.Synset", sense2: "wn.Synset", option: str = "path") -> float:
    """
    Returns maximum path similarity between two senses.

    :param sense1: A synset.
    :param sense2: A synset.
    :param option: String, one of ('path', 'wup', 'lch').
    :return: A float, similarity measurement. Unrecognized *option*
        values fall through and return None (preserved behavior).
    """
    option = option.lower()  # normalize once instead of per branch
    if option in ("path", "path_similarity"):  # Path similarities.
        # path_similarity is not guaranteed symmetric, so take the
        # larger of both directions.
        return max(wn.path_similarity(sense1, sense2, if_none_return=0),
                   wn.path_similarity(sense2, sense1, if_none_return=0))
    elif option in ("wup", "wupa", "wu-palmer"):  # Wu-Palmer (duplicate literal removed)
        return max(wn.wup_similarity(sense1, sense2, if_none_return=0),
                   wn.wup_similarity(sense2, sense1, if_none_return=0))
    elif option in ("lch", "leacock-chordorow"):  # Leacock-Chodorow
        if sense1.pos != sense2.pos:  # lch can't do diff POS
            return 0
        return wn.lch_similarity(sense1, sense2, if_none_return=0)
constant[
Returns maximum path similarity between two senses.
:param sense1: A synset.
:param sense2: A synset.
:param option: String, one of ('path', 'wup', 'lch').
:return: A float, similarity measurement.
]
if compare[call[name[option].lower, parameter[]] in list[[<ast.Constant object at 0x7da1b1d5fa60>, <ast.Constant object at 0x7da1b1d5fb50>]]] begin[:]
return[call[name[max], parameter[call[name[wn].path_similarity, parameter[name[sense1], name[sense2]]], call[name[wn].path_similarity, parameter[name[sense2], name[sense1]]]]]] | keyword[def] identifier[similarity_by_path] ( identifier[sense1] : literal[string] , identifier[sense2] : literal[string] , identifier[option] : identifier[str] = literal[string] )-> identifier[float] :
literal[string]
keyword[if] identifier[option] . identifier[lower] () keyword[in] [ literal[string] , literal[string] ]:
keyword[return] identifier[max] ( identifier[wn] . identifier[path_similarity] ( identifier[sense1] , identifier[sense2] , identifier[if_none_return] = literal[int] ),
identifier[wn] . identifier[path_similarity] ( identifier[sense2] , identifier[sense1] , identifier[if_none_return] = literal[int] ))
keyword[elif] identifier[option] . identifier[lower] () keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
keyword[return] identifier[max] ( identifier[wn] . identifier[wup_similarity] ( identifier[sense1] , identifier[sense2] , identifier[if_none_return] = literal[int] ),
identifier[wn] . identifier[wup_similarity] ( identifier[sense2] , identifier[sense1] , identifier[if_none_return] = literal[int] ))
keyword[elif] identifier[option] . identifier[lower] () keyword[in] [ literal[string] , literal[string] ]:
keyword[if] identifier[sense1] . identifier[pos] != identifier[sense2] . identifier[pos] :
keyword[return] literal[int]
keyword[return] identifier[wn] . identifier[lch_similarity] ( identifier[sense1] , identifier[sense2] , identifier[if_none_return] = literal[int] ) | def similarity_by_path(sense1: 'wn.Synset', sense2: 'wn.Synset', option: str='path') -> float:
"""
Returns maximum path similarity between two senses.
:param sense1: A synset.
:param sense2: A synset.
:param option: String, one of ('path', 'wup', 'lch').
:return: A float, similarity measurement.
"""
if option.lower() in ['path', 'path_similarity']: # Path similarities.
return max(wn.path_similarity(sense1, sense2, if_none_return=0), wn.path_similarity(sense2, sense1, if_none_return=0)) # depends on [control=['if'], data=[]]
elif option.lower() in ['wup', 'wupa', 'wu-palmer', 'wu-palmer']: # Wu-Palmer
return max(wn.wup_similarity(sense1, sense2, if_none_return=0), wn.wup_similarity(sense2, sense1, if_none_return=0)) # depends on [control=['if'], data=[]]
elif option.lower() in ['lch', 'leacock-chordorow']: # Leacock-Chodorow
if sense1.pos != sense2.pos: # lch can't do diff POS
return 0 # depends on [control=['if'], data=[]]
return wn.lch_similarity(sense1, sense2, if_none_return=0) # depends on [control=['if'], data=[]] |
def matching_selectors(self, partial_selector):
    """Retrieves all selectors matching `partial_selector`.

    For instance, if "one.a.b" and "two.a.b" are stored in a `SelectorMap`, both
    `matching_selectors('b')` and `matching_selectors('a.b')` will return them.

    In the event that `partial_selector` exactly matches an existing complete
    selector, only that complete selector is returned. For instance, if
    "a.b.c.d" and "c.d" are stored, `matching_selectors('c.d')` will return only
    `['c.d']`, while `matching_selectors('d')` will return both.

    Args:
      partial_selector: The partial selector to find matches for.

    Returns:
      A list of selectors matching `partial_selector`.
    """
    # Exact hit on a complete selector short-circuits everything else.
    if partial_selector in self._selector_map:
        return [partial_selector]

    # The tree is keyed by selector components in reverse order, so walk
    # it from the last component of the query backwards.
    subtree = self._selector_tree
    for part in reversed(partial_selector.split('.')):
        if part not in subtree:
            return []
        subtree = subtree[part]

    # Depth-first walk of the remaining subtree, collecting every
    # complete selector stored under the terminal key.
    found = []
    pending = [subtree]
    while pending:
        current = pending.pop().copy()
        complete = current.pop(_TERMINAL_KEY, None)
        pending.extend(current.values())
        if complete:
            found.append(complete)
    return found
constant[Retrieves all selectors matching `partial_selector`.
For instance, if "one.a.b" and "two.a.b" are stored in a `SelectorMap`, both
`matching_selectors('b')` and `matching_selectors('a.b')` will return them.
In the event that `partial_selector` exactly matches an existing complete
selector, only that complete selector is returned. For instance, if
"a.b.c.d" and "c.d" are stored, `matching_selectors('c.d')` will return only
`['c.d']`, while `matching_selectors('d')` will return both.
Args:
partial_selector: The partial selector to find matches for.
Returns:
A list of selectors matching `partial_selector`.
]
if compare[name[partial_selector] in name[self]._selector_map] begin[:]
return[list[[<ast.Name object at 0x7da1b020cb50>]]]
variable[selector_components] assign[=] call[name[partial_selector].split, parameter[constant[.]]]
variable[node] assign[=] name[self]._selector_tree
for taget[name[component]] in starred[call[name[reversed], parameter[name[selector_components]]]] begin[:]
if compare[name[component] <ast.NotIn object at 0x7da2590d7190> name[node]] begin[:]
return[list[[]]]
variable[node] assign[=] call[name[node]][name[component]]
variable[selectors] assign[=] list[[]]
variable[dfs_stack] assign[=] list[[<ast.Name object at 0x7da1b0285660>]]
while name[dfs_stack] begin[:]
variable[node] assign[=] call[call[name[dfs_stack].pop, parameter[]].copy, parameter[]]
variable[selector] assign[=] call[name[node].pop, parameter[name[_TERMINAL_KEY], constant[None]]]
call[name[dfs_stack].extend, parameter[call[name[node].values, parameter[]]]]
if name[selector] begin[:]
call[name[selectors].append, parameter[name[selector]]]
return[name[selectors]] | keyword[def] identifier[matching_selectors] ( identifier[self] , identifier[partial_selector] ):
literal[string]
keyword[if] identifier[partial_selector] keyword[in] identifier[self] . identifier[_selector_map] :
keyword[return] [ identifier[partial_selector] ]
identifier[selector_components] = identifier[partial_selector] . identifier[split] ( literal[string] )
identifier[node] = identifier[self] . identifier[_selector_tree]
keyword[for] identifier[component] keyword[in] identifier[reversed] ( identifier[selector_components] ):
keyword[if] identifier[component] keyword[not] keyword[in] identifier[node] :
keyword[return] []
identifier[node] = identifier[node] [ identifier[component] ]
identifier[selectors] =[]
identifier[dfs_stack] =[ identifier[node] ]
keyword[while] identifier[dfs_stack] :
identifier[node] = identifier[dfs_stack] . identifier[pop] (). identifier[copy] ()
identifier[selector] = identifier[node] . identifier[pop] ( identifier[_TERMINAL_KEY] , keyword[None] )
identifier[dfs_stack] . identifier[extend] ( identifier[node] . identifier[values] ())
keyword[if] identifier[selector] :
identifier[selectors] . identifier[append] ( identifier[selector] )
keyword[return] identifier[selectors] | def matching_selectors(self, partial_selector):
"""Retrieves all selectors matching `partial_selector`.
For instance, if "one.a.b" and "two.a.b" are stored in a `SelectorMap`, both
`matching_selectors('b')` and `matching_selectors('a.b')` will return them.
In the event that `partial_selector` exactly matches an existing complete
selector, only that complete selector is returned. For instance, if
"a.b.c.d" and "c.d" are stored, `matching_selectors('c.d')` will return only
`['c.d']`, while `matching_selectors('d')` will return both.
Args:
partial_selector: The partial selector to find matches for.
Returns:
A list of selectors matching `partial_selector`.
"""
if partial_selector in self._selector_map:
return [partial_selector] # depends on [control=['if'], data=['partial_selector']]
selector_components = partial_selector.split('.')
node = self._selector_tree
for component in reversed(selector_components):
if component not in node:
return [] # depends on [control=['if'], data=[]]
node = node[component] # depends on [control=['for'], data=['component']]
selectors = []
dfs_stack = [node]
while dfs_stack:
node = dfs_stack.pop().copy()
selector = node.pop(_TERMINAL_KEY, None)
dfs_stack.extend(node.values())
if selector:
selectors.append(selector) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
return selectors |
def remove_app(name, site):
    '''
    Remove an IIS application.

    :param str name: The application name.
    :param str site: The IIS site name.

    Usage:

    .. code-block:: yaml

        site0-v1-app-remove:
            win_iis.remove_app:
                - name: v1
                - site: site0
    '''
    ret = {'name': name,
           'changes': {},
           'comment': '',
           'result': None}

    if name not in __salt__['win_iis.list_apps'](site):
        # Nothing to do: the application is already gone.
        ret['result'] = True
        ret['comment'] = 'Application has already been removed: {0}'.format(name)
        return ret

    changes = {'old': name, 'new': None}
    if __opts__['test']:
        # Dry run: report the pending change without touching IIS.
        ret['comment'] = 'Application will be removed: {0}'.format(name)
        ret['changes'] = changes
    else:
        ret['comment'] = 'Removed application: {0}'.format(name)
        ret['changes'] = changes
        ret['result'] = __salt__['win_iis.remove_app'](name, site)
    return ret
constant[
Remove an IIS application.
:param str name: The application name.
:param str site: The IIS site name.
Usage:
.. code-block:: yaml
site0-v1-app-remove:
win_iis.remove_app:
- name: v1
- site: site0
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da204345a50>, <ast.Constant object at 0x7da204346b30>, <ast.Constant object at 0x7da204345420>, <ast.Constant object at 0x7da204344d00>], [<ast.Name object at 0x7da204347430>, <ast.Dict object at 0x7da2043469e0>, <ast.Call object at 0x7da2043453c0>, <ast.Constant object at 0x7da204347220>]]
variable[current_apps] assign[=] call[call[name[__salt__]][constant[win_iis.list_apps]], parameter[name[site]]]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[current_apps]] begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[Application has already been removed: {0}].format, parameter[name[name]]]
call[name[ret]][constant[result]] assign[=] constant[True]
return[name[ret]] | keyword[def] identifier[remove_app] ( identifier[name] , identifier[site] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] ,
literal[string] :{},
literal[string] : identifier[str] (),
literal[string] : keyword[None] }
identifier[current_apps] = identifier[__salt__] [ literal[string] ]( identifier[site] )
keyword[if] identifier[name] keyword[not] keyword[in] identifier[current_apps] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]= keyword[True]
keyword[elif] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]={ literal[string] : identifier[name] ,
literal[string] : keyword[None] }
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]={ literal[string] : identifier[name] ,
literal[string] : keyword[None] }
identifier[ret] [ literal[string] ]= identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[site] )
keyword[return] identifier[ret] | def remove_app(name, site):
"""
Remove an IIS application.
:param str name: The application name.
:param str site: The IIS site name.
Usage:
.. code-block:: yaml
site0-v1-app-remove:
win_iis.remove_app:
- name: v1
- site: site0
"""
ret = {'name': name, 'changes': {}, 'comment': str(), 'result': None}
current_apps = __salt__['win_iis.list_apps'](site)
if name not in current_apps:
ret['comment'] = 'Application has already been removed: {0}'.format(name)
ret['result'] = True # depends on [control=['if'], data=['name']]
elif __opts__['test']:
ret['comment'] = 'Application will be removed: {0}'.format(name)
ret['changes'] = {'old': name, 'new': None} # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'Removed application: {0}'.format(name)
ret['changes'] = {'old': name, 'new': None}
ret['result'] = __salt__['win_iis.remove_app'](name, site)
return ret |
def make_ita(noise, acf=None):
    """
    Create the matrix ita of the noise where the noise may be a masked array
    where ita(x,y) is the correlation between pixel pairs that have the same separation as x and y.

    Parameters
    ----------
    noise : 2d-array
        The noise image

    acf : 2d-array
        The autocorrelation matrix. (None = calculate from data).
        Default = None.

    Returns
    -------
    ita : 2d-array
        The (s, s) matrix ita, where s is the number of finite pixels.
    """
    if acf is None:
        acf = nan_acf(noise)
    # Coordinates of the non-masked (finite) pixels.
    xm, ym = np.where(np.isfinite(noise))
    # Pairwise separations |x1 - x2| and |y1 - y2| for every pixel pair,
    # computed via broadcasting instead of the former O(s**2) Python
    # double loop.
    dx = np.abs(xm[:, None] - xm[None, :])
    dy = np.abs(ym[:, None] - ym[None, :])
    # Each entry is the ACF value at that pair's separation; cast to
    # float64 to match the dtype of the previous np.zeros-based result.
    ita = acf[dx, dy].astype(np.float64)
    return ita
constant[
Create the matrix ita of the noise where the noise may be a masked array
where ita(x,y) is the correlation between pixel pairs that have the same separation as x and y.
Parameters
----------
noise : 2d-array
The noise image
acf : 2d-array
The autocorrelation matrix. (None = calculate from data).
Default = None.
Returns
-------
ita : 2d-array
The matrix ita
]
if compare[name[acf] is constant[None]] begin[:]
variable[acf] assign[=] call[name[nan_acf], parameter[name[noise]]]
variable[s] assign[=] call[name[np].count_nonzero, parameter[call[name[np].isfinite, parameter[name[noise]]]]]
<ast.Tuple object at 0x7da1b26af0d0> assign[=] call[name[np].where, parameter[call[name[np].isfinite, parameter[name[noise]]]]]
variable[ita] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da2047e9f90>, <ast.Name object at 0x7da2047e8310>]]]]
for taget[tuple[[<ast.Name object at 0x7da2047eb7f0>, <ast.Tuple object at 0x7da2047e93f0>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[xm], name[ym]]]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18f58d360>, <ast.Tuple object at 0x7da18f58f790>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[xm], name[ym]]]]]] begin[:]
variable[k] assign[=] call[name[abs], parameter[binary_operation[name[x1] - name[x2]]]]
variable[l] assign[=] call[name[abs], parameter[binary_operation[name[y1] - name[y2]]]]
call[name[ita]][tuple[[<ast.Name object at 0x7da18f58e9b0>, <ast.Name object at 0x7da18f58d9f0>]]] assign[=] call[name[acf]][tuple[[<ast.Name object at 0x7da18f58fb20>, <ast.Name object at 0x7da18f58fc40>]]]
return[name[ita]] | keyword[def] identifier[make_ita] ( identifier[noise] , identifier[acf] = keyword[None] ):
literal[string]
keyword[if] identifier[acf] keyword[is] keyword[None] :
identifier[acf] = identifier[nan_acf] ( identifier[noise] )
identifier[s] = identifier[np] . identifier[count_nonzero] ( identifier[np] . identifier[isfinite] ( identifier[noise] ))
identifier[xm] , identifier[ym] = identifier[np] . identifier[where] ( identifier[np] . identifier[isfinite] ( identifier[noise] ))
identifier[ita] = identifier[np] . identifier[zeros] (( identifier[s] , identifier[s] ))
keyword[for] identifier[i] ,( identifier[x1] , identifier[y1] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[xm] , identifier[ym] )):
keyword[for] identifier[j] ,( identifier[x2] , identifier[y2] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[xm] , identifier[ym] )):
identifier[k] = identifier[abs] ( identifier[x1] - identifier[x2] )
identifier[l] = identifier[abs] ( identifier[y1] - identifier[y2] )
identifier[ita] [ identifier[i] , identifier[j] ]= identifier[acf] [ identifier[k] , identifier[l] ]
keyword[return] identifier[ita] | def make_ita(noise, acf=None):
"""
Create the matrix ita of the noise where the noise may be a masked array
where ita(x,y) is the correlation between pixel pairs that have the same separation as x and y.
Parameters
----------
noise : 2d-array
The noise image
acf : 2d-array
The autocorrelation matrix. (None = calculate from data).
Default = None.
Returns
-------
ita : 2d-array
The matrix ita
"""
if acf is None:
acf = nan_acf(noise) # depends on [control=['if'], data=['acf']]
# s should be the number of non-masked pixels
s = np.count_nonzero(np.isfinite(noise))
# the indices of the non-masked pixels
(xm, ym) = np.where(np.isfinite(noise))
ita = np.zeros((s, s))
# iterate over the pixels
for (i, (x1, y1)) in enumerate(zip(xm, ym)):
for (j, (x2, y2)) in enumerate(zip(xm, ym)):
k = abs(x1 - x2)
l = abs(y1 - y2)
ita[i, j] = acf[k, l] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
return ita |
def register_error_handler(app, handler=None):
    """
    Register error handler

    Attaches *handler* (or the module's default_error_handler when none
    is supplied) to the app for every HTTP exception code werkzeug is
    aware about.

    :param app: flask.Flask - flask application instance
    :param handler: function - the handler
    :return: None
    """
    handler = handler or default_error_handler
    for code in exceptions.default_exceptions:
        app.register_error_handler(code, handler)
constant[
Register error handler
Registers an exception handler on the app instance for every type of
exception code werkzeug is aware about.
:param app: flask.Flask - flask application instance
:param handler: function - the handler
:return: None
]
if <ast.UnaryOp object at 0x7da20c6c58d0> begin[:]
variable[handler] assign[=] name[default_error_handler]
for taget[name[code]] in starred[call[name[exceptions].default_exceptions.keys, parameter[]]] begin[:]
call[name[app].register_error_handler, parameter[name[code], name[handler]]] | keyword[def] identifier[register_error_handler] ( identifier[app] , identifier[handler] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[handler] :
identifier[handler] = identifier[default_error_handler]
keyword[for] identifier[code] keyword[in] identifier[exceptions] . identifier[default_exceptions] . identifier[keys] ():
identifier[app] . identifier[register_error_handler] ( identifier[code] , identifier[handler] ) | def register_error_handler(app, handler=None):
"""
Register error handler
Registers an exception handler on the app instance for every type of
exception code werkzeug is aware about.
:param app: flask.Flask - flask application instance
:param handler: function - the handler
:return: None
"""
if not handler:
handler = default_error_handler # depends on [control=['if'], data=[]]
for code in exceptions.default_exceptions.keys():
app.register_error_handler(code, handler) # depends on [control=['for'], data=['code']] |
def sanitize_for_archive(url, headers, payload):
    """Sanitize payload of a HTTP request by removing the token information
    before storing/retrieving archived items

    :param: url: HTTP url request
    :param: headers: HTTP headers request
    :param: payload: HTTP payload request

    :returns url, headers and the sanitized payload
    """
    # pop() with a default removes the API key when present and is a
    # no-op otherwise, mutating the caller's payload dict in place just
    # as the original membership-test-then-pop did.
    payload.pop(DiscourseClient.PKEY, None)
    return url, headers, payload
constant[Sanitize payload of a HTTP request by removing the token information
before storing/retrieving archived items
:param: url: HTTP url request
:param: headers: HTTP headers request
:param: payload: HTTP payload request
:returns url, headers and the sanitized payload
]
if compare[name[DiscourseClient].PKEY in name[payload]] begin[:]
call[name[payload].pop, parameter[name[DiscourseClient].PKEY]]
return[tuple[[<ast.Name object at 0x7da1b0297400>, <ast.Name object at 0x7da1b0294f40>, <ast.Name object at 0x7da1b0294910>]]] | keyword[def] identifier[sanitize_for_archive] ( identifier[url] , identifier[headers] , identifier[payload] ):
literal[string]
keyword[if] identifier[DiscourseClient] . identifier[PKEY] keyword[in] identifier[payload] :
identifier[payload] . identifier[pop] ( identifier[DiscourseClient] . identifier[PKEY] )
keyword[return] identifier[url] , identifier[headers] , identifier[payload] | def sanitize_for_archive(url, headers, payload):
"""Sanitize payload of a HTTP request by removing the token information
before storing/retrieving archived items
:param: url: HTTP url request
:param: headers: HTTP headers request
:param: payload: HTTP payload request
:returns url, headers and the sanitized payload
"""
if DiscourseClient.PKEY in payload:
payload.pop(DiscourseClient.PKEY) # depends on [control=['if'], data=['payload']]
return (url, headers, payload) |
def _gl_look_at(self, pos, target, up):
    """Build a standard look-at view matrix.

    :param pos: current camera position
    :param target: target position to look at
    :param up: direction up
    :returns: the combined translation * rotation view matrix
    """
    # Orthonormal camera basis: z points from the target back to the eye.
    axis_z = vector.normalise(pos - target)
    axis_x = vector.normalise(vector3.cross(vector.normalise(up), axis_z))
    axis_y = vector3.cross(axis_z, axis_x)

    # Translation part: move the world opposite to the camera position.
    translation = matrix44.create_identity()
    translation[3][0] = -pos.x
    translation[3][1] = -pos.y
    translation[3][2] = -pos.z

    # Rotation part: write each basis vector into its own column.
    orientation = matrix44.create_identity()
    for row, (bx, by, bz) in enumerate(zip(axis_x, axis_y, axis_z)):
        orientation[row][0] = bx  # -- X
        orientation[row][1] = by  # -- Y
        orientation[row][2] = bz  # -- Z

    return matrix44.multiply(translation, orientation)
constant[
The standard lookAt method
:param pos: current position
:param target: target position to look at
:param up: direction up
]
variable[z] assign[=] call[name[vector].normalise, parameter[binary_operation[name[pos] - name[target]]]]
variable[x] assign[=] call[name[vector].normalise, parameter[call[name[vector3].cross, parameter[call[name[vector].normalise, parameter[name[up]]], name[z]]]]]
variable[y] assign[=] call[name[vector3].cross, parameter[name[z], name[x]]]
variable[translate] assign[=] call[name[matrix44].create_identity, parameter[]]
call[call[name[translate]][constant[3]]][constant[0]] assign[=] <ast.UnaryOp object at 0x7da18f810640>
call[call[name[translate]][constant[3]]][constant[1]] assign[=] <ast.UnaryOp object at 0x7da18f812d10>
call[call[name[translate]][constant[3]]][constant[2]] assign[=] <ast.UnaryOp object at 0x7da18f813af0>
variable[rotate] assign[=] call[name[matrix44].create_identity, parameter[]]
call[call[name[rotate]][constant[0]]][constant[0]] assign[=] call[name[x]][constant[0]]
call[call[name[rotate]][constant[1]]][constant[0]] assign[=] call[name[x]][constant[1]]
call[call[name[rotate]][constant[2]]][constant[0]] assign[=] call[name[x]][constant[2]]
call[call[name[rotate]][constant[0]]][constant[1]] assign[=] call[name[y]][constant[0]]
call[call[name[rotate]][constant[1]]][constant[1]] assign[=] call[name[y]][constant[1]]
call[call[name[rotate]][constant[2]]][constant[1]] assign[=] call[name[y]][constant[2]]
call[call[name[rotate]][constant[0]]][constant[2]] assign[=] call[name[z]][constant[0]]
call[call[name[rotate]][constant[1]]][constant[2]] assign[=] call[name[z]][constant[1]]
call[call[name[rotate]][constant[2]]][constant[2]] assign[=] call[name[z]][constant[2]]
return[call[name[matrix44].multiply, parameter[name[translate], name[rotate]]]] | keyword[def] identifier[_gl_look_at] ( identifier[self] , identifier[pos] , identifier[target] , identifier[up] ):
literal[string]
identifier[z] = identifier[vector] . identifier[normalise] ( identifier[pos] - identifier[target] )
identifier[x] = identifier[vector] . identifier[normalise] ( identifier[vector3] . identifier[cross] ( identifier[vector] . identifier[normalise] ( identifier[up] ), identifier[z] ))
identifier[y] = identifier[vector3] . identifier[cross] ( identifier[z] , identifier[x] )
identifier[translate] = identifier[matrix44] . identifier[create_identity] ()
identifier[translate] [ literal[int] ][ literal[int] ]=- identifier[pos] . identifier[x]
identifier[translate] [ literal[int] ][ literal[int] ]=- identifier[pos] . identifier[y]
identifier[translate] [ literal[int] ][ literal[int] ]=- identifier[pos] . identifier[z]
identifier[rotate] = identifier[matrix44] . identifier[create_identity] ()
identifier[rotate] [ literal[int] ][ literal[int] ]= identifier[x] [ literal[int] ]
identifier[rotate] [ literal[int] ][ literal[int] ]= identifier[x] [ literal[int] ]
identifier[rotate] [ literal[int] ][ literal[int] ]= identifier[x] [ literal[int] ]
identifier[rotate] [ literal[int] ][ literal[int] ]= identifier[y] [ literal[int] ]
identifier[rotate] [ literal[int] ][ literal[int] ]= identifier[y] [ literal[int] ]
identifier[rotate] [ literal[int] ][ literal[int] ]= identifier[y] [ literal[int] ]
identifier[rotate] [ literal[int] ][ literal[int] ]= identifier[z] [ literal[int] ]
identifier[rotate] [ literal[int] ][ literal[int] ]= identifier[z] [ literal[int] ]
identifier[rotate] [ literal[int] ][ literal[int] ]= identifier[z] [ literal[int] ]
keyword[return] identifier[matrix44] . identifier[multiply] ( identifier[translate] , identifier[rotate] ) | def _gl_look_at(self, pos, target, up):
"""
The standard lookAt method
:param pos: current position
:param target: target position to look at
:param up: direction up
"""
z = vector.normalise(pos - target)
x = vector.normalise(vector3.cross(vector.normalise(up), z))
y = vector3.cross(z, x)
translate = matrix44.create_identity()
translate[3][0] = -pos.x
translate[3][1] = -pos.y
translate[3][2] = -pos.z
rotate = matrix44.create_identity()
rotate[0][0] = x[0] # -- X
rotate[1][0] = x[1]
rotate[2][0] = x[2]
rotate[0][1] = y[0] # -- Y
rotate[1][1] = y[1]
rotate[2][1] = y[2]
rotate[0][2] = z[0] # -- Z
rotate[1][2] = z[1]
rotate[2][2] = z[2]
return matrix44.multiply(translate, rotate) |
def remove_router_from_hosting_device(self, client, hosting_device_id,
                                      router_id):
    """Remove a router from a hosting device.

    :param client: client used to issue the HTTP request
    :param hosting_device_id: ID of the hosting device
    :param router_id: ID of the router to detach
    :returns: the client's response to the DELETE call
    """
    base = hostingdevice.HostingDevice.resource_path
    uri = (base + DEVICE_L3_ROUTERS + "/%s") % (hosting_device_id, router_id)
    return client.delete(uri)
constant[Remove a router from hosting_device.]
variable[res_path] assign[=] name[hostingdevice].HostingDevice.resource_path
return[call[name[client].delete, parameter[binary_operation[binary_operation[binary_operation[name[res_path] + name[DEVICE_L3_ROUTERS]] + constant[/%s]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1b34c10>, <ast.Name object at 0x7da1b1b34be0>]]]]]] | keyword[def] identifier[remove_router_from_hosting_device] ( identifier[self] , identifier[client] , identifier[hosting_device_id] ,
identifier[router_id] ):
literal[string]
identifier[res_path] = identifier[hostingdevice] . identifier[HostingDevice] . identifier[resource_path]
keyword[return] identifier[client] . identifier[delete] (( identifier[res_path] + identifier[DEVICE_L3_ROUTERS] + literal[string] )%(
identifier[hosting_device_id] , identifier[router_id] )) | def remove_router_from_hosting_device(self, client, hosting_device_id, router_id):
"""Remove a router from hosting_device."""
res_path = hostingdevice.HostingDevice.resource_path
return client.delete((res_path + DEVICE_L3_ROUTERS + '/%s') % (hosting_device_id, router_id)) |
def _modify_trust(self, truster, mod_peer_func, trustee):
'''Modify a trusted peer device by deploying an iapp.
:param truster: ManagementRoot object -- device on which to perform
commands
:param mod_peer_func: function -- function to call to modify peer
:param trustee: ManagementRoot object or str -- device to modify
'''
iapp_name = 'trusted_device'
mod_peer_cmd = mod_peer_func(trustee)
iapp_actions = self.iapp_actions.copy()
iapp_actions['definition']['implementation'] = mod_peer_cmd
self._deploy_iapp(iapp_name, iapp_actions, truster)
self._delete_iapp(iapp_name, truster) | def function[_modify_trust, parameter[self, truster, mod_peer_func, trustee]]:
constant[Modify a trusted peer device by deploying an iapp.
:param truster: ManagementRoot object -- device on which to perform
commands
:param mod_peer_func: function -- function to call to modify peer
:param trustee: ManagementRoot object or str -- device to modify
]
variable[iapp_name] assign[=] constant[trusted_device]
variable[mod_peer_cmd] assign[=] call[name[mod_peer_func], parameter[name[trustee]]]
variable[iapp_actions] assign[=] call[name[self].iapp_actions.copy, parameter[]]
call[call[name[iapp_actions]][constant[definition]]][constant[implementation]] assign[=] name[mod_peer_cmd]
call[name[self]._deploy_iapp, parameter[name[iapp_name], name[iapp_actions], name[truster]]]
call[name[self]._delete_iapp, parameter[name[iapp_name], name[truster]]] | keyword[def] identifier[_modify_trust] ( identifier[self] , identifier[truster] , identifier[mod_peer_func] , identifier[trustee] ):
literal[string]
identifier[iapp_name] = literal[string]
identifier[mod_peer_cmd] = identifier[mod_peer_func] ( identifier[trustee] )
identifier[iapp_actions] = identifier[self] . identifier[iapp_actions] . identifier[copy] ()
identifier[iapp_actions] [ literal[string] ][ literal[string] ]= identifier[mod_peer_cmd]
identifier[self] . identifier[_deploy_iapp] ( identifier[iapp_name] , identifier[iapp_actions] , identifier[truster] )
identifier[self] . identifier[_delete_iapp] ( identifier[iapp_name] , identifier[truster] ) | def _modify_trust(self, truster, mod_peer_func, trustee):
"""Modify a trusted peer device by deploying an iapp.
:param truster: ManagementRoot object -- device on which to perform
commands
:param mod_peer_func: function -- function to call to modify peer
:param trustee: ManagementRoot object or str -- device to modify
"""
iapp_name = 'trusted_device'
mod_peer_cmd = mod_peer_func(trustee)
iapp_actions = self.iapp_actions.copy()
iapp_actions['definition']['implementation'] = mod_peer_cmd
self._deploy_iapp(iapp_name, iapp_actions, truster)
self._delete_iapp(iapp_name, truster) |
def _job_queue_empty(self):
"""
A callback method called when the job queue is empty.
:return: None
"""
self._iteratively_clean_pending_exits()
while self._pending_jobs:
# We don't have any exits remaining. Let's pop out a pending exit
pending_job = self._get_one_pending_job()
if pending_job is None:
continue
self._insert_job(pending_job)
self._register_analysis_job(pending_job.func_addr, pending_job)
break | def function[_job_queue_empty, parameter[self]]:
constant[
A callback method called when the job queue is empty.
:return: None
]
call[name[self]._iteratively_clean_pending_exits, parameter[]]
while name[self]._pending_jobs begin[:]
variable[pending_job] assign[=] call[name[self]._get_one_pending_job, parameter[]]
if compare[name[pending_job] is constant[None]] begin[:]
continue
call[name[self]._insert_job, parameter[name[pending_job]]]
call[name[self]._register_analysis_job, parameter[name[pending_job].func_addr, name[pending_job]]]
break | keyword[def] identifier[_job_queue_empty] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_iteratively_clean_pending_exits] ()
keyword[while] identifier[self] . identifier[_pending_jobs] :
identifier[pending_job] = identifier[self] . identifier[_get_one_pending_job] ()
keyword[if] identifier[pending_job] keyword[is] keyword[None] :
keyword[continue]
identifier[self] . identifier[_insert_job] ( identifier[pending_job] )
identifier[self] . identifier[_register_analysis_job] ( identifier[pending_job] . identifier[func_addr] , identifier[pending_job] )
keyword[break] | def _job_queue_empty(self):
"""
A callback method called when the job queue is empty.
:return: None
"""
self._iteratively_clean_pending_exits()
while self._pending_jobs:
# We don't have any exits remaining. Let's pop out a pending exit
pending_job = self._get_one_pending_job()
if pending_job is None:
continue # depends on [control=['if'], data=[]]
self._insert_job(pending_job)
self._register_analysis_job(pending_job.func_addr, pending_job)
break # depends on [control=['while'], data=[]] |
def set_image(self, text):
    """
    Save the image resource at `text` (path or url) to storage, then return
    the replacement string and the necessary exercise image file object.
    Args:
    - text (str): path or url to parse as an exercise image resource
    Returns: (new_text, files)
    - `new_text` (str): replacement string for the original `text` string
    - `files` (list): list of files that were downloaded from `text`
    """
    # Already rewritten to a storage path: nothing to do.
    if exercises.CONTENT_STORAGE_PLACEHOLDER in text:
        return text, []

    cleaned = text.strip().replace('\\n', '')

    # Choose the file wrapper that matches the resource flavour:
    # web+graphie path, base64-inlined image, or plain path/url.
    graphie = re.compile(WEB_GRAPHIE_URL_REGEX, flags=re.IGNORECASE).match(cleaned)
    if graphie:
        raw_path = graphie.groupdict()['rawpath']
        image_file = _ExerciseGraphieFile(raw_path.replace("//", "https://"))
    elif get_base64_encoding(cleaned):
        image_file = _ExerciseBase64ImageFile(cleaned)
    else:
        image_file = _ExerciseImageFile(cleaned)

    # Link back to this assessment item, then store the file so the
    # replacement string becomes available.
    image_file.assessment_item = self
    image_file.process_file()

    replacement = exercises.CONTENT_STORAGE_FORMAT.format(image_file.get_replacement_str())
    if graphie:  # need to put back the `web+graphie:` prefix
        replacement = "web+graphie:" + replacement
    return replacement, [image_file]
constant[
Save image resource at `text` (path or url) to storage, then return the
replacement string and the necessary exercicse image file object.
Args:
- text (str): path or url to parse as an exercise image resource
Returns: (new_text, files)
- `new_text` (str): replacement string for the original `text` string
- `files` (list): list of files that were downloaded from `text`
]
if compare[name[exercises].CONTENT_STORAGE_PLACEHOLDER in name[text]] begin[:]
return[tuple[[<ast.Name object at 0x7da1b0c45300>, <ast.List object at 0x7da1b0c47670>]]]
variable[stripped_text] assign[=] call[call[name[text].strip, parameter[]].replace, parameter[constant[\n], constant[]]]
variable[graphie_regex] assign[=] call[name[re].compile, parameter[name[WEB_GRAPHIE_URL_REGEX]]]
variable[graphie_match] assign[=] call[name[graphie_regex].match, parameter[name[stripped_text]]]
if name[graphie_match] begin[:]
variable[is_web_plus_graphie] assign[=] constant[True]
variable[graphie_rawpath] assign[=] call[call[name[graphie_match].groupdict, parameter[]]][constant[rawpath]]
variable[graphie_path] assign[=] call[name[graphie_rawpath].replace, parameter[constant[//], constant[https://]]]
variable[exercise_image_file] assign[=] call[name[_ExerciseGraphieFile], parameter[name[graphie_path]]]
name[exercise_image_file].assessment_item assign[=] name[self]
variable[_filename] assign[=] call[name[exercise_image_file].process_file, parameter[]]
variable[new_text] assign[=] call[name[exercises].CONTENT_STORAGE_FORMAT.format, parameter[call[name[exercise_image_file].get_replacement_str, parameter[]]]]
if name[is_web_plus_graphie] begin[:]
variable[new_text] assign[=] binary_operation[constant[web+graphie:] + name[new_text]]
return[tuple[[<ast.Name object at 0x7da207f9ad40>, <ast.List object at 0x7da207f9a8f0>]]] | keyword[def] identifier[set_image] ( identifier[self] , identifier[text] ):
literal[string]
keyword[if] identifier[exercises] . identifier[CONTENT_STORAGE_PLACEHOLDER] keyword[in] identifier[text] :
keyword[return] identifier[text] ,[]
identifier[stripped_text] = identifier[text] . identifier[strip] (). identifier[replace] ( literal[string] , literal[string] )
identifier[graphie_regex] = identifier[re] . identifier[compile] ( identifier[WEB_GRAPHIE_URL_REGEX] , identifier[flags] = identifier[re] . identifier[IGNORECASE] )
identifier[graphie_match] = identifier[graphie_regex] . identifier[match] ( identifier[stripped_text] )
keyword[if] identifier[graphie_match] :
identifier[is_web_plus_graphie] = keyword[True]
identifier[graphie_rawpath] = identifier[graphie_match] . identifier[groupdict] ()[ literal[string] ]
identifier[graphie_path] = identifier[graphie_rawpath] . identifier[replace] ( literal[string] , literal[string] )
identifier[exercise_image_file] = identifier[_ExerciseGraphieFile] ( identifier[graphie_path] )
keyword[elif] identifier[get_base64_encoding] ( identifier[stripped_text] ):
identifier[is_web_plus_graphie] = keyword[False]
identifier[exercise_image_file] = identifier[_ExerciseBase64ImageFile] ( identifier[stripped_text] )
keyword[else] :
identifier[is_web_plus_graphie] = keyword[False]
identifier[exercise_image_file] = identifier[_ExerciseImageFile] ( identifier[stripped_text] )
identifier[exercise_image_file] . identifier[assessment_item] = identifier[self]
identifier[_filename] = identifier[exercise_image_file] . identifier[process_file] ()
identifier[new_text] = identifier[exercises] . identifier[CONTENT_STORAGE_FORMAT] . identifier[format] ( identifier[exercise_image_file] . identifier[get_replacement_str] ())
keyword[if] identifier[is_web_plus_graphie] :
identifier[new_text] = literal[string] + identifier[new_text]
keyword[return] identifier[new_text] ,[ identifier[exercise_image_file] ] | def set_image(self, text):
"""
Save image resource at `text` (path or url) to storage, then return the
replacement string and the necessary exercicse image file object.
Args:
- text (str): path or url to parse as an exercise image resource
Returns: (new_text, files)
- `new_text` (str): replacement string for the original `text` string
- `files` (list): list of files that were downloaded from `text`
"""
# Make sure `text` hasn't already been processed
if exercises.CONTENT_STORAGE_PLACEHOLDER in text:
return (text, []) # depends on [control=['if'], data=['text']]
# Strip `text` of whitespace
stripped_text = text.strip().replace('\\n', '')
# If `stripped_text` is a web+graphie: path, we need special processing
graphie_regex = re.compile(WEB_GRAPHIE_URL_REGEX, flags=re.IGNORECASE)
graphie_match = graphie_regex.match(stripped_text)
if graphie_match:
is_web_plus_graphie = True
graphie_rawpath = graphie_match.groupdict()['rawpath']
graphie_path = graphie_rawpath.replace('//', 'https://')
exercise_image_file = _ExerciseGraphieFile(graphie_path) # depends on [control=['if'], data=[]]
elif get_base64_encoding(stripped_text):
is_web_plus_graphie = False
exercise_image_file = _ExerciseBase64ImageFile(stripped_text) # depends on [control=['if'], data=[]]
else:
is_web_plus_graphie = False
exercise_image_file = _ExerciseImageFile(stripped_text)
# Setup link to assessment item
exercise_image_file.assessment_item = self
# Process file to make the replacement_str available
_filename = exercise_image_file.process_file()
# Get `new_text` = the replacement path for the image resource
new_text = exercises.CONTENT_STORAGE_FORMAT.format(exercise_image_file.get_replacement_str())
if is_web_plus_graphie: # need to put back the `web+graphie:` prefix
new_text = 'web+graphie:' + new_text # depends on [control=['if'], data=[]]
return (new_text, [exercise_image_file]) |
def evaluate_parameter_sets(self):
    """
    This takes the parameter sets of the model instance and evaluates any
    formulas using the parameter values to create a fixed, full set of
    parameters for each parameter set in the model.
    """
    interpreter = LcoptParameterSet(self.modelInstance)
    self.parameter_interpreter = interpreter
    # Mirror the evaluated results onto the model instance itself.
    self.modelInstance.evaluated_parameter_sets = interpreter.evaluated_parameter_sets
    self.modelInstance.bw2_export_params = interpreter.bw2_export_params
self.modelInstance.bw2_export_params = self.parameter_interpreter.bw2_export_params | def function[evaluate_parameter_sets, parameter[self]]:
constant[
This takes the parameter sets of the model instance and evaluates any formulas using the parameter values to create a
fixed, full set of parameters for each parameter set in the model
]
name[self].parameter_interpreter assign[=] call[name[LcoptParameterSet], parameter[name[self].modelInstance]]
name[self].modelInstance.evaluated_parameter_sets assign[=] name[self].parameter_interpreter.evaluated_parameter_sets
name[self].modelInstance.bw2_export_params assign[=] name[self].parameter_interpreter.bw2_export_params | keyword[def] identifier[evaluate_parameter_sets] ( identifier[self] ):
literal[string]
identifier[self] . identifier[parameter_interpreter] = identifier[LcoptParameterSet] ( identifier[self] . identifier[modelInstance] )
identifier[self] . identifier[modelInstance] . identifier[evaluated_parameter_sets] = identifier[self] . identifier[parameter_interpreter] . identifier[evaluated_parameter_sets]
identifier[self] . identifier[modelInstance] . identifier[bw2_export_params] = identifier[self] . identifier[parameter_interpreter] . identifier[bw2_export_params] | def evaluate_parameter_sets(self):
"""
This takes the parameter sets of the model instance and evaluates any formulas using the parameter values to create a
fixed, full set of parameters for each parameter set in the model
"""
#parameter_interpreter = ParameterInterpreter(self.modelInstance)
#parameter_interpreter.evaluate_parameter_sets()
self.parameter_interpreter = LcoptParameterSet(self.modelInstance)
self.modelInstance.evaluated_parameter_sets = self.parameter_interpreter.evaluated_parameter_sets
self.modelInstance.bw2_export_params = self.parameter_interpreter.bw2_export_params |
def device_event_list(self, **kwargs):  # noqa: E501
    """List all device events for an account.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass asynchronous=True
    >>> thread = api.device_event_list(asynchronous=True)
    >>> result = thread.get()

    :param asynchronous bool
    :param int limit: How many objects to retrieve in the page.
    :param str order: The order of the records based on creation time,
        `ASC` or `DESC`; by default `ASC`.
    :param str after: The ID of The item after which to retrieve the next page.
    :param str include: Comma-separated list of data fields to return.
        Currently supported: `total_count`
    :param str filter: URL encoded query string parameter to filter returned
        data, e.g. ``?filter=id%3D...%26state_change%3DTrue``. Filterable
        fields include `date_time`, `description`, `id`, `device_id`,
        `event_type` and `state_change`, with the `__eq`/`__neq`,
        `__in`/`__nin` and (for date-times) `__lte`/`__gte` operators.
    :return: DeviceEventPage
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper always get the data only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('asynchronous'):
        return self.device_event_list_with_http_info(**kwargs)  # noqa: E501
    return self.device_event_list_with_http_info(**kwargs)  # noqa: E501
constant[List all device events. # noqa: E501
List all device events for an account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.device_event_list(asynchronous=True)
>>> result = thread.get()
:param asynchronous bool
:param int limit: How many objects to retrieve in the page.
:param str order: The order of the records based on creation time, `ASC` or `DESC`; by default `ASC`.
:param str after: The ID of The item after which to retrieve the next page.
:param str include: Comma-separated list of data fields to return. Currently supported: `total_count`
:param str filter: URL encoded query string parameter to filter returned data. ##### Filtering ```?filter={URL encoded query string}``` The query string is made up of key/value pairs separated by ampersands. So for a query of ```key1=value1&key2=value2&key3=value3``` this would be encoded as follows: ```?filter=key1%3Dvalue1%26key2%3Dvalue2%26key3%3Dvalue3``` ###### Filterable fields: The below table lists all the fields that can be filtered on with certain filters: <table> <thead> <tr> <th>Field</th> <th>= / __eq / __neq</th> <th>__in / __nin</th> <th>__lte / __gte</th> <tr> <thead> <tbody> <tr> <td>date_time</td> <td>✓</td> <td>✓</td> <td>✓</td> </tr> <tr> <td>description</td> <td>✓</td> <td>✓</td> <td> </td> </tr> <tr> <td>id</td> <td>✓</td> <td>✓</td> <td> </td> </tr> <tr> <td>device_id</td> <td>✓</td> <td>✓</td> <td> </td> </tr> <tr> <td>event_type</td> <td>✓</td> <td>✓</td> <td> </td> </tr> <tr> <td>state_change</td> <td>✓</td> <td>✓</td> <td> </td> </tr> </tbody> </table> The examples below show the queries in *unencoded* form. ###### By id: ```id={id}``` ###### By state change: ```state_change=[True|False]``` ###### By event type: ```event_type={value}``` ###### On date-time fields: Date-time fields should be specified in UTC RFC3339 format ```YYYY-MM-DDThh:mm:ss.msZ```. There are three permitted variations: * UTC RFC3339 with milliseconds e.g. 2016-11-30T16:25:12.1234Z * UTC RFC3339 without milliseconds e.g. 2016-11-30T16:25:12Z * UTC RFC3339 shortened - without milliseconds and punctuation e.g. 20161130T162512Z Date-time filtering supports three operators: * equality * greater than or equal to – field name suffixed with ```__gte``` * less than or equal to – field name suffixed with ```__lte``` Lower and upper limits to a date-time range may be specified by including both the ```__gte``` and ```__lte``` forms in the filter. 
```{field name}[|__lte|__gte]={UTC RFC3339 date-time}``` ##### Multi-field example ```id=0158d38771f70000000000010010038c&state_change=True&date_time__gte=2016-11-30T16:25:12.1234Z``` Encoded: ```?filter=id%3D0158d38771f70000000000010010038c%26state_change%3DTrue%26date_time__gte%3D2016-11-30T16%3A25%3A12.1234Z``` ##### Filtering with filter operators String field filtering supports the following operators: * equality: `__eq` * non-equality: `__neq` * in : `__in` * not in: `__nin` For `__in` and `__nin` filters list of parameters must be comma-separated: `event_type__in=update.device.device-created,update.device.device-updated`
:return: DeviceEventPage
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[asynchronous]]] begin[:]
return[call[name[self].device_event_list_with_http_info, parameter[]]] | keyword[def] identifier[device_event_list] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[device_event_list_with_http_info] (** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[device_event_list_with_http_info] (** identifier[kwargs] )
keyword[return] identifier[data] | def device_event_list(self, **kwargs): # noqa: E501
'List all device events. # noqa: E501\n\n List all device events for an account. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass asynchronous=True\n >>> thread = api.device_event_list(asynchronous=True)\n >>> result = thread.get()\n\n :param asynchronous bool\n :param int limit: How many objects to retrieve in the page.\n :param str order: The order of the records based on creation time, `ASC` or `DESC`; by default `ASC`.\n :param str after: The ID of The item after which to retrieve the next page.\n :param str include: Comma-separated list of data fields to return. Currently supported: `total_count`\n :param str filter: URL encoded query string parameter to filter returned data. ##### Filtering ```?filter={URL encoded query string}``` The query string is made up of key/value pairs separated by ampersands. So for a query of ```key1=value1&key2=value2&key3=value3``` this would be encoded as follows: ```?filter=key1%3Dvalue1%26key2%3Dvalue2%26key3%3Dvalue3``` ###### Filterable fields: The below table lists all the fields that can be filtered on with certain filters: <table> <thead> <tr> <th>Field</th> <th>= / __eq / __neq</th> <th>__in / __nin</th> <th>__lte / __gte</th> <tr> <thead> <tbody> <tr> <td>date_time</td> <td>✓</td> <td>✓</td> <td>✓</td> </tr> <tr> <td>description</td> <td>✓</td> <td>✓</td> <td> </td> </tr> <tr> <td>id</td> <td>✓</td> <td>✓</td> <td> </td> </tr> <tr> <td>device_id</td> <td>✓</td> <td>✓</td> <td> </td> </tr> <tr> <td>event_type</td> <td>✓</td> <td>✓</td> <td> </td> </tr> <tr> <td>state_change</td> <td>✓</td> <td>✓</td> <td> </td> </tr> </tbody> </table> The examples below show the queries in *unencoded* form. ###### By id: ```id={id}``` ###### By state change: ```state_change=[True|False]``` ###### By event type: ```event_type={value}``` ###### On date-time fields: Date-time fields should be specified in UTC RFC3339 format ```YYYY-MM-DDThh:mm:ss.msZ```. 
There are three permitted variations: * UTC RFC3339 with milliseconds e.g. 2016-11-30T16:25:12.1234Z * UTC RFC3339 without milliseconds e.g. 2016-11-30T16:25:12Z * UTC RFC3339 shortened - without milliseconds and punctuation e.g. 20161130T162512Z Date-time filtering supports three operators: * equality * greater than or equal to – field name suffixed with ```__gte``` * less than or equal to – field name suffixed with ```__lte``` Lower and upper limits to a date-time range may be specified by including both the ```__gte``` and ```__lte``` forms in the filter. ```{field name}[|__lte|__gte]={UTC RFC3339 date-time}``` ##### Multi-field example ```id=0158d38771f70000000000010010038c&state_change=True&date_time__gte=2016-11-30T16:25:12.1234Z``` Encoded: ```?filter=id%3D0158d38771f70000000000010010038c%26state_change%3DTrue%26date_time__gte%3D2016-11-30T16%3A25%3A12.1234Z``` ##### Filtering with filter operators String field filtering supports the following operators: * equality: `__eq` * non-equality: `__neq` * in : `__in` * not in: `__nin` For `__in` and `__nin` filters list of parameters must be comma-separated: `event_type__in=update.device.device-created,update.device.device-updated`\n :return: DeviceEventPage\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('asynchronous'):
return self.device_event_list_with_http_info(**kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.device_event_list_with_http_info(**kwargs) # noqa: E501
return data |
def get_label(self, lang='en'):
    """
    Look up this item's label in the given language.

    :param lang: language code of the label to retrieve
    :type lang: str
    :return: the label in the requested language, or an empty string if no
        label exists for that language
    """
    if self.fast_run:
        # Fast-run mode serves language data from the local cache container.
        cached = list(self.fast_run_container.get_language_data(
            self.wd_item_id, lang, 'label'))
        return cached[0]
    labels = self.wd_json_representation.get('labels', {})
    entry = labels.get(lang) or {}
    return entry.get('value', '')
constant[
Returns the label for a certain language
:param lang:
:type lang: str
:return: returns the label in the specified language, an empty string if the label does not exist
]
if name[self].fast_run begin[:]
return[call[call[name[list], parameter[call[name[self].fast_run_container.get_language_data, parameter[name[self].wd_item_id, name[lang], constant[label]]]]]][constant[0]]]
<ast.Try object at 0x7da20c7c9870> | keyword[def] identifier[get_label] ( identifier[self] , identifier[lang] = literal[string] ):
literal[string]
keyword[if] identifier[self] . identifier[fast_run] :
keyword[return] identifier[list] ( identifier[self] . identifier[fast_run_container] . identifier[get_language_data] ( identifier[self] . identifier[wd_item_id] , identifier[lang] , literal[string] ))[ literal[int] ]
keyword[try] :
keyword[return] identifier[self] . identifier[wd_json_representation] [ literal[string] ][ identifier[lang] ][ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[return] literal[string] | def get_label(self, lang='en'):
"""
Returns the label for a certain language
:param lang:
:type lang: str
:return: returns the label in the specified language, an empty string if the label does not exist
"""
if self.fast_run:
return list(self.fast_run_container.get_language_data(self.wd_item_id, lang, 'label'))[0] # depends on [control=['if'], data=[]]
try:
return self.wd_json_representation['labels'][lang]['value'] # depends on [control=['try'], data=[]]
except KeyError:
return '' # depends on [control=['except'], data=[]] |
def _validate_codeblock_size(self, cparams):
"""
Code block dimensions must satisfy certain restrictions.
They must both be a power of 2 and the total area defined by the width
and height cannot be either too great or too small for the codec.
"""
if cparams.cblockw_init != 0 and cparams.cblockh_init != 0:
# These fields ARE zero if uninitialized.
width = cparams.cblockw_init
height = cparams.cblockh_init
if height * width > 4096 or height < 4 or width < 4:
msg = ("The code block area is specified as "
"{height} x {width} = {area} square pixels. "
"Code block area cannot exceed 4096 square pixels. "
"Code block height and width dimensions must be larger "
"than 4 pixels.")
msg = msg.format(height=height, width=width,
area=height * width)
raise IOError(msg)
if ((math.log(height, 2) != math.floor(math.log(height, 2)) or
math.log(width, 2) != math.floor(math.log(width, 2)))):
msg = ("Bad code block size ({height} x {width}). "
"The dimensions must be powers of 2.")
msg = msg.format(height=height, width=width)
raise IOError(msg) | def function[_validate_codeblock_size, parameter[self, cparams]]:
constant[
Code block dimensions must satisfy certain restrictions.
They must both be a power of 2 and the total area defined by the width
and height cannot be either too great or too small for the codec.
]
if <ast.BoolOp object at 0x7da20c6c7580> begin[:]
variable[width] assign[=] name[cparams].cblockw_init
variable[height] assign[=] name[cparams].cblockh_init
if <ast.BoolOp object at 0x7da20c6c7910> begin[:]
variable[msg] assign[=] constant[The code block area is specified as {height} x {width} = {area} square pixels. Code block area cannot exceed 4096 square pixels. Code block height and width dimensions must be larger than 4 pixels.]
variable[msg] assign[=] call[name[msg].format, parameter[]]
<ast.Raise object at 0x7da20c6c7af0>
if <ast.BoolOp object at 0x7da20c6c4310> begin[:]
variable[msg] assign[=] constant[Bad code block size ({height} x {width}). The dimensions must be powers of 2.]
variable[msg] assign[=] call[name[msg].format, parameter[]]
<ast.Raise object at 0x7da20c6c76d0> | keyword[def] identifier[_validate_codeblock_size] ( identifier[self] , identifier[cparams] ):
literal[string]
keyword[if] identifier[cparams] . identifier[cblockw_init] != literal[int] keyword[and] identifier[cparams] . identifier[cblockh_init] != literal[int] :
identifier[width] = identifier[cparams] . identifier[cblockw_init]
identifier[height] = identifier[cparams] . identifier[cblockh_init]
keyword[if] identifier[height] * identifier[width] > literal[int] keyword[or] identifier[height] < literal[int] keyword[or] identifier[width] < literal[int] :
identifier[msg] =( literal[string]
literal[string]
literal[string]
literal[string]
literal[string] )
identifier[msg] = identifier[msg] . identifier[format] ( identifier[height] = identifier[height] , identifier[width] = identifier[width] ,
identifier[area] = identifier[height] * identifier[width] )
keyword[raise] identifier[IOError] ( identifier[msg] )
keyword[if] (( identifier[math] . identifier[log] ( identifier[height] , literal[int] )!= identifier[math] . identifier[floor] ( identifier[math] . identifier[log] ( identifier[height] , literal[int] )) keyword[or]
identifier[math] . identifier[log] ( identifier[width] , literal[int] )!= identifier[math] . identifier[floor] ( identifier[math] . identifier[log] ( identifier[width] , literal[int] )))):
identifier[msg] =( literal[string]
literal[string] )
identifier[msg] = identifier[msg] . identifier[format] ( identifier[height] = identifier[height] , identifier[width] = identifier[width] )
keyword[raise] identifier[IOError] ( identifier[msg] ) | def _validate_codeblock_size(self, cparams):
"""
Code block dimensions must satisfy certain restrictions.
They must both be a power of 2 and the total area defined by the width
and height cannot be either too great or too small for the codec.
"""
if cparams.cblockw_init != 0 and cparams.cblockh_init != 0:
# These fields ARE zero if uninitialized.
width = cparams.cblockw_init
height = cparams.cblockh_init
if height * width > 4096 or height < 4 or width < 4:
msg = 'The code block area is specified as {height} x {width} = {area} square pixels. Code block area cannot exceed 4096 square pixels. Code block height and width dimensions must be larger than 4 pixels.'
msg = msg.format(height=height, width=width, area=height * width)
raise IOError(msg) # depends on [control=['if'], data=[]]
if math.log(height, 2) != math.floor(math.log(height, 2)) or math.log(width, 2) != math.floor(math.log(width, 2)):
msg = 'Bad code block size ({height} x {width}). The dimensions must be powers of 2.'
msg = msg.format(height=height, width=width)
raise IOError(msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def tzname(self, dt):
    """
    Return the UTC offset formatted as ``[+-]HH:MM`` or ``[+-]HH:MM:SS``.

    http://docs.python.org/library/datetime.html#datetime.tzinfo.tzname
    """
    offset = self.__offset
    sign = '-' if offset < datetime.timedelta() else '+'
    # total_seconds was introduced in Python 2.7; fall back to computing
    # it by hand on older interpreters.
    if hasattr(offset, 'total_seconds'):
        total_seconds = offset.total_seconds()
    else:
        total_seconds = (offset.days * 24 * 60 * 60 +
                         offset.seconds +
                         offset.microseconds / 1000000.0)
    # Format the magnitude; the sign is prepended separately.  Taking the
    # absolute value fixes negative offsets, which previously floored to
    # a negative hour and rendered as e.g. '--5:00' instead of '-05:00'.
    hours, remainder = divmod(int(abs(total_seconds)), 60 * 60)
    minutes, seconds = divmod(remainder, 60)
    if seconds:
        return '%s%02d:%02d:%02d' % (sign, hours, minutes, seconds)
    else:
        return '%s%02d:%02d' % (sign, hours, minutes)
constant[
http://docs.python.org/library/datetime.html#datetime.tzinfo.tzname
]
variable[sign] assign[=] constant[+]
if compare[name[self].__offset less[<] call[name[datetime].timedelta, parameter[]]] begin[:]
variable[sign] assign[=] constant[-]
if call[name[hasattr], parameter[name[self].__offset, constant[total_seconds]]] begin[:]
variable[total_seconds] assign[=] call[name[self].__offset.total_seconds, parameter[]]
variable[hours] assign[=] binary_operation[name[total_seconds] <ast.FloorDiv object at 0x7da2590d6bc0> binary_operation[constant[60] * constant[60]]]
<ast.AugAssign object at 0x7da1b074e350>
variable[minutes] assign[=] binary_operation[name[total_seconds] <ast.FloorDiv object at 0x7da2590d6bc0> constant[60]]
<ast.AugAssign object at 0x7da1b074d090>
variable[seconds] assign[=] binary_operation[name[total_seconds] <ast.FloorDiv object at 0x7da2590d6bc0> constant[1]]
<ast.AugAssign object at 0x7da18ede4a30>
if name[seconds] begin[:]
return[binary_operation[constant[%s%02d:%02d:%02d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18ede5600>, <ast.Name object at 0x7da18ede48b0>, <ast.Name object at 0x7da18ede50f0>, <ast.Name object at 0x7da18ede6b60>]]]] | keyword[def] identifier[tzname] ( identifier[self] , identifier[dt] ):
literal[string]
identifier[sign] = literal[string]
keyword[if] identifier[self] . identifier[__offset] < identifier[datetime] . identifier[timedelta] ():
identifier[sign] = literal[string]
keyword[if] identifier[hasattr] ( identifier[self] . identifier[__offset] , literal[string] ):
identifier[total_seconds] = identifier[self] . identifier[__offset] . identifier[total_seconds] ()
keyword[else] :
identifier[total_seconds] =( identifier[self] . identifier[__offset] . identifier[days] * literal[int] * literal[int] * literal[int] )+( identifier[self] . identifier[__offset] . identifier[seconds] )+( identifier[self] . identifier[__offset] . identifier[microseconds] / literal[int] )
identifier[hours] = identifier[total_seconds] //( literal[int] * literal[int] )
identifier[total_seconds] -= identifier[hours] * literal[int] * literal[int]
identifier[minutes] = identifier[total_seconds] // literal[int]
identifier[total_seconds] -= identifier[minutes] * literal[int]
identifier[seconds] = identifier[total_seconds] // literal[int]
identifier[total_seconds] -= identifier[seconds]
keyword[if] identifier[seconds] :
keyword[return] literal[string] %( identifier[sign] , identifier[hours] , identifier[minutes] , identifier[seconds] )
keyword[else] :
keyword[return] literal[string] %( identifier[sign] , identifier[hours] , identifier[minutes] ) | def tzname(self, dt):
"""
http://docs.python.org/library/datetime.html#datetime.tzinfo.tzname
"""
sign = '+'
if self.__offset < datetime.timedelta():
sign = '-' # depends on [control=['if'], data=[]]
# total_seconds was introduced in Python 2.7
if hasattr(self.__offset, 'total_seconds'):
total_seconds = self.__offset.total_seconds() # depends on [control=['if'], data=[]]
else:
total_seconds = self.__offset.days * 24 * 60 * 60 + self.__offset.seconds + self.__offset.microseconds / 1000000.0
hours = total_seconds // (60 * 60)
total_seconds -= hours * 60 * 60
minutes = total_seconds // 60
total_seconds -= minutes * 60
seconds = total_seconds // 1
total_seconds -= seconds
if seconds:
return '%s%02d:%02d:%02d' % (sign, hours, minutes, seconds) # depends on [control=['if'], data=[]]
else:
return '%s%02d:%02d' % (sign, hours, minutes) |
def get_top_assets(self):
    """
    Collect up to 15 top assets: images first, then (if enabled) videos.

    The map asset is built separately.
    """
    # At most 14 images, topped up with at most 10 videos, capped at 15.
    assets = list(self.get_all_images()[0:14])
    if supports_video:
        assets.extend(self.eventvideo_set.all()[0:10])
    return assets[0:15]
constant[
Gets images and videos to populate top assets.
Map is built separately.
]
variable[images] assign[=] call[call[name[self].get_all_images, parameter[]]][<ast.Slice object at 0x7da20c6c6830>]
variable[video] assign[=] list[[]]
if name[supports_video] begin[:]
variable[video] assign[=] call[call[name[self].eventvideo_set.all, parameter[]]][<ast.Slice object at 0x7da20c6c6650>]
return[call[call[name[list], parameter[call[name[chain], parameter[name[images], name[video]]]]]][<ast.Slice object at 0x7da20c6c6c20>]] | keyword[def] identifier[get_top_assets] ( identifier[self] ):
literal[string]
identifier[images] = identifier[self] . identifier[get_all_images] ()[ literal[int] : literal[int] ]
identifier[video] =[]
keyword[if] identifier[supports_video] :
identifier[video] = identifier[self] . identifier[eventvideo_set] . identifier[all] ()[ literal[int] : literal[int] ]
keyword[return] identifier[list] ( identifier[chain] ( identifier[images] , identifier[video] ))[ literal[int] : literal[int] ] | def get_top_assets(self):
"""
Gets images and videos to populate top assets.
Map is built separately.
"""
images = self.get_all_images()[0:14]
video = []
if supports_video:
video = self.eventvideo_set.all()[0:10] # depends on [control=['if'], data=[]]
return list(chain(images, video))[0:15] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.