code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def callback(self, request, **kwargs):
"""
Called from the Service when the user accept to activate it
"""
try:
client = self.get_evernote_client()
# finally we save the user auth token
# As we already stored the object ServicesActivated
# from the UserServiceCreateView now we update the same
# object to the database so :
# 1) we get the previous object
us = UserService.objects.get(user=request.user, name=ServicesActivated.objects.get(name='ServiceEvernote'))
# 2) then get the token
us.token = client.get_access_token(request.session['oauth_token'], request.session['oauth_token_secret'],
request.GET.get('oauth_verifier', ''))
# 3) and save everything
us.save()
except KeyError:
return '/'
return 'evernote/callback.html' | def function[callback, parameter[self, request]]:
constant[
Called from the Service when the user accept to activate it
]
<ast.Try object at 0x7da1b26ae890>
return[constant[evernote/callback.html]] | keyword[def] identifier[callback] ( identifier[self] , identifier[request] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[client] = identifier[self] . identifier[get_evernote_client] ()
identifier[us] = identifier[UserService] . identifier[objects] . identifier[get] ( identifier[user] = identifier[request] . identifier[user] , identifier[name] = identifier[ServicesActivated] . identifier[objects] . identifier[get] ( identifier[name] = literal[string] ))
identifier[us] . identifier[token] = identifier[client] . identifier[get_access_token] ( identifier[request] . identifier[session] [ literal[string] ], identifier[request] . identifier[session] [ literal[string] ],
identifier[request] . identifier[GET] . identifier[get] ( literal[string] , literal[string] ))
identifier[us] . identifier[save] ()
keyword[except] identifier[KeyError] :
keyword[return] literal[string]
keyword[return] literal[string] | def callback(self, request, **kwargs):
"""
Called from the Service when the user accept to activate it
"""
try:
client = self.get_evernote_client()
# finally we save the user auth token
# As we already stored the object ServicesActivated
# from the UserServiceCreateView now we update the same
# object to the database so :
# 1) we get the previous object
us = UserService.objects.get(user=request.user, name=ServicesActivated.objects.get(name='ServiceEvernote'))
# 2) then get the token
us.token = client.get_access_token(request.session['oauth_token'], request.session['oauth_token_secret'], request.GET.get('oauth_verifier', ''))
# 3) and save everything
us.save() # depends on [control=['try'], data=[]]
except KeyError:
return '/' # depends on [control=['except'], data=[]]
return 'evernote/callback.html' |
def from_content(cls, content):
"""Creates an instance of the class from the HTML content of the guild's page.
Parameters
-----------
content: :class:`str`
The HTML content of the page.
Returns
----------
:class:`Guild`
The guild contained in the page or None if it doesn't exist.
Raises
------
InvalidContent
If content is not the HTML of a guild's page.
"""
if "An internal error has occurred" in content:
return None
parsed_content = parse_tibiacom_content(content)
try:
name_header = parsed_content.find('h1')
guild = Guild(name_header.text.strip())
except AttributeError:
raise InvalidContent("content does not belong to a Tibia.com guild page.")
if not guild._parse_logo(parsed_content):
raise InvalidContent("content does not belong to a Tibia.com guild page.")
info_container = parsed_content.find("div", id="GuildInformationContainer")
guild._parse_guild_info(info_container)
guild._parse_application_info(info_container)
guild._parse_guild_homepage(info_container)
guild._parse_guild_guildhall(info_container)
guild._parse_guild_disband_info(info_container)
guild._parse_guild_members(parsed_content)
if guild.guildhall and guild.members:
guild.guildhall.owner = guild.members[0].name
return guild | def function[from_content, parameter[cls, content]]:
constant[Creates an instance of the class from the HTML content of the guild's page.
Parameters
-----------
content: :class:`str`
The HTML content of the page.
Returns
----------
:class:`Guild`
The guild contained in the page or None if it doesn't exist.
Raises
------
InvalidContent
If content is not the HTML of a guild's page.
]
if compare[constant[An internal error has occurred] in name[content]] begin[:]
return[constant[None]]
variable[parsed_content] assign[=] call[name[parse_tibiacom_content], parameter[name[content]]]
<ast.Try object at 0x7da18f09cdf0>
if <ast.UnaryOp object at 0x7da18f09ceb0> begin[:]
<ast.Raise object at 0x7da18f09c220>
variable[info_container] assign[=] call[name[parsed_content].find, parameter[constant[div]]]
call[name[guild]._parse_guild_info, parameter[name[info_container]]]
call[name[guild]._parse_application_info, parameter[name[info_container]]]
call[name[guild]._parse_guild_homepage, parameter[name[info_container]]]
call[name[guild]._parse_guild_guildhall, parameter[name[info_container]]]
call[name[guild]._parse_guild_disband_info, parameter[name[info_container]]]
call[name[guild]._parse_guild_members, parameter[name[parsed_content]]]
if <ast.BoolOp object at 0x7da18fe90940> begin[:]
name[guild].guildhall.owner assign[=] call[name[guild].members][constant[0]].name
return[name[guild]] | keyword[def] identifier[from_content] ( identifier[cls] , identifier[content] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[content] :
keyword[return] keyword[None]
identifier[parsed_content] = identifier[parse_tibiacom_content] ( identifier[content] )
keyword[try] :
identifier[name_header] = identifier[parsed_content] . identifier[find] ( literal[string] )
identifier[guild] = identifier[Guild] ( identifier[name_header] . identifier[text] . identifier[strip] ())
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[InvalidContent] ( literal[string] )
keyword[if] keyword[not] identifier[guild] . identifier[_parse_logo] ( identifier[parsed_content] ):
keyword[raise] identifier[InvalidContent] ( literal[string] )
identifier[info_container] = identifier[parsed_content] . identifier[find] ( literal[string] , identifier[id] = literal[string] )
identifier[guild] . identifier[_parse_guild_info] ( identifier[info_container] )
identifier[guild] . identifier[_parse_application_info] ( identifier[info_container] )
identifier[guild] . identifier[_parse_guild_homepage] ( identifier[info_container] )
identifier[guild] . identifier[_parse_guild_guildhall] ( identifier[info_container] )
identifier[guild] . identifier[_parse_guild_disband_info] ( identifier[info_container] )
identifier[guild] . identifier[_parse_guild_members] ( identifier[parsed_content] )
keyword[if] identifier[guild] . identifier[guildhall] keyword[and] identifier[guild] . identifier[members] :
identifier[guild] . identifier[guildhall] . identifier[owner] = identifier[guild] . identifier[members] [ literal[int] ]. identifier[name]
keyword[return] identifier[guild] | def from_content(cls, content):
"""Creates an instance of the class from the HTML content of the guild's page.
Parameters
-----------
content: :class:`str`
The HTML content of the page.
Returns
----------
:class:`Guild`
The guild contained in the page or None if it doesn't exist.
Raises
------
InvalidContent
If content is not the HTML of a guild's page.
"""
if 'An internal error has occurred' in content:
return None # depends on [control=['if'], data=[]]
parsed_content = parse_tibiacom_content(content)
try:
name_header = parsed_content.find('h1')
guild = Guild(name_header.text.strip()) # depends on [control=['try'], data=[]]
except AttributeError:
raise InvalidContent('content does not belong to a Tibia.com guild page.') # depends on [control=['except'], data=[]]
if not guild._parse_logo(parsed_content):
raise InvalidContent('content does not belong to a Tibia.com guild page.') # depends on [control=['if'], data=[]]
info_container = parsed_content.find('div', id='GuildInformationContainer')
guild._parse_guild_info(info_container)
guild._parse_application_info(info_container)
guild._parse_guild_homepage(info_container)
guild._parse_guild_guildhall(info_container)
guild._parse_guild_disband_info(info_container)
guild._parse_guild_members(parsed_content)
if guild.guildhall and guild.members:
guild.guildhall.owner = guild.members[0].name # depends on [control=['if'], data=[]]
return guild |
def get_changelog_types(pr_payload):
"""
Fetch the labels from the PR and process the ones related to the changelog.
"""
changelog_labels = []
for name in get_pr_labels(pr_payload):
if name.startswith(CHANGELOG_LABEL_PREFIX):
# only add the name, e.g. for `changelog/Added` it's just `Added`
changelog_labels.append(name.split(CHANGELOG_LABEL_PREFIX)[1])
return changelog_labels | def function[get_changelog_types, parameter[pr_payload]]:
constant[
Fetch the labels from the PR and process the ones related to the changelog.
]
variable[changelog_labels] assign[=] list[[]]
for taget[name[name]] in starred[call[name[get_pr_labels], parameter[name[pr_payload]]]] begin[:]
if call[name[name].startswith, parameter[name[CHANGELOG_LABEL_PREFIX]]] begin[:]
call[name[changelog_labels].append, parameter[call[call[name[name].split, parameter[name[CHANGELOG_LABEL_PREFIX]]]][constant[1]]]]
return[name[changelog_labels]] | keyword[def] identifier[get_changelog_types] ( identifier[pr_payload] ):
literal[string]
identifier[changelog_labels] =[]
keyword[for] identifier[name] keyword[in] identifier[get_pr_labels] ( identifier[pr_payload] ):
keyword[if] identifier[name] . identifier[startswith] ( identifier[CHANGELOG_LABEL_PREFIX] ):
identifier[changelog_labels] . identifier[append] ( identifier[name] . identifier[split] ( identifier[CHANGELOG_LABEL_PREFIX] )[ literal[int] ])
keyword[return] identifier[changelog_labels] | def get_changelog_types(pr_payload):
"""
Fetch the labels from the PR and process the ones related to the changelog.
"""
changelog_labels = []
for name in get_pr_labels(pr_payload):
if name.startswith(CHANGELOG_LABEL_PREFIX):
# only add the name, e.g. for `changelog/Added` it's just `Added`
changelog_labels.append(name.split(CHANGELOG_LABEL_PREFIX)[1]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
return changelog_labels |
def Start(self, Minimized=False, Nosplash=False):
"""Starts Skype application.
:Parameters:
Minimized : bool
If True, Skype is started minimized in system tray.
Nosplash : bool
If True, no splash screen is displayed upon startup.
"""
self._Skype._Api.startup(Minimized, Nosplash) | def function[Start, parameter[self, Minimized, Nosplash]]:
constant[Starts Skype application.
:Parameters:
Minimized : bool
If True, Skype is started minimized in system tray.
Nosplash : bool
If True, no splash screen is displayed upon startup.
]
call[name[self]._Skype._Api.startup, parameter[name[Minimized], name[Nosplash]]] | keyword[def] identifier[Start] ( identifier[self] , identifier[Minimized] = keyword[False] , identifier[Nosplash] = keyword[False] ):
literal[string]
identifier[self] . identifier[_Skype] . identifier[_Api] . identifier[startup] ( identifier[Minimized] , identifier[Nosplash] ) | def Start(self, Minimized=False, Nosplash=False):
"""Starts Skype application.
:Parameters:
Minimized : bool
If True, Skype is started minimized in system tray.
Nosplash : bool
If True, no splash screen is displayed upon startup.
"""
self._Skype._Api.startup(Minimized, Nosplash) |
def grid_at_redshift_from_image_plane_grid_and_redshift(self, image_plane_grid, redshift):
"""For an input grid of (y,x) arc-second image-plane coordinates, ray-trace the coordinates to any redshift in \
the strong lens configuration.
This is performed using multi-plane ray-tracing and the existing redshifts and planes of the tracer. However, \
any redshift can be input even if a plane does not exist there, including redshifts before the first plane \
of the lensing system.
Parameters
----------
image_plane_grid : ndsrray or grids.RegularGrid
The image-plane grid which is traced to the redshift.
redshift : float
The redshift the image-plane grid is traced to.
"""
# TODO : We need to come up with a better abstraction for multi-plane lensing 0_0
image_plane_grid_stack = grids.GridStack(regular=image_plane_grid, sub=np.array([[0.0, 0.0]]),
blurring=np.array([[0.0, 0.0]]))
tracer = TracerMultiPlanes(galaxies=self.galaxies, image_plane_grid_stack=image_plane_grid_stack,
border=None, cosmology=self.cosmology)
for plane_index in range(0, len(self.plane_redshifts)):
new_grid_stack = image_plane_grid_stack
if redshift <= tracer.plane_redshifts[plane_index]:
# If redshift is between two planes, we need to map over all previous planes coordinates / deflections.
if plane_index > 0:
for previous_plane_index in range(plane_index):
scaling_factor = cosmology_util.scaling_factor_between_redshifts_from_redshifts_and_cosmology(
redshift_0=tracer.plane_redshifts[previous_plane_index], redshift_1=redshift,
redshift_final=tracer.plane_redshifts[-1], cosmology=tracer.cosmology)
scaled_deflection_stack = lens_util.scaled_deflection_stack_from_plane_and_scaling_factor(
plane=tracer.planes[previous_plane_index], scaling_factor=scaling_factor)
new_grid_stack = \
lens_util.grid_stack_from_deflection_stack(grid_stack=new_grid_stack,
deflection_stack=scaled_deflection_stack)
# If redshift is before the first plane, no change to image pllane coordinates.
elif plane_index == 0:
return new_grid_stack.regular
return new_grid_stack.regular | def function[grid_at_redshift_from_image_plane_grid_and_redshift, parameter[self, image_plane_grid, redshift]]:
constant[For an input grid of (y,x) arc-second image-plane coordinates, ray-trace the coordinates to any redshift in the strong lens configuration.
This is performed using multi-plane ray-tracing and the existing redshifts and planes of the tracer. However, any redshift can be input even if a plane does not exist there, including redshifts before the first plane of the lensing system.
Parameters
----------
image_plane_grid : ndsrray or grids.RegularGrid
The image-plane grid which is traced to the redshift.
redshift : float
The redshift the image-plane grid is traced to.
]
variable[image_plane_grid_stack] assign[=] call[name[grids].GridStack, parameter[]]
variable[tracer] assign[=] call[name[TracerMultiPlanes], parameter[]]
for taget[name[plane_index]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[self].plane_redshifts]]]]] begin[:]
variable[new_grid_stack] assign[=] name[image_plane_grid_stack]
if compare[name[redshift] less_or_equal[<=] call[name[tracer].plane_redshifts][name[plane_index]]] begin[:]
if compare[name[plane_index] greater[>] constant[0]] begin[:]
for taget[name[previous_plane_index]] in starred[call[name[range], parameter[name[plane_index]]]] begin[:]
variable[scaling_factor] assign[=] call[name[cosmology_util].scaling_factor_between_redshifts_from_redshifts_and_cosmology, parameter[]]
variable[scaled_deflection_stack] assign[=] call[name[lens_util].scaled_deflection_stack_from_plane_and_scaling_factor, parameter[]]
variable[new_grid_stack] assign[=] call[name[lens_util].grid_stack_from_deflection_stack, parameter[]]
return[name[new_grid_stack].regular] | keyword[def] identifier[grid_at_redshift_from_image_plane_grid_and_redshift] ( identifier[self] , identifier[image_plane_grid] , identifier[redshift] ):
literal[string]
identifier[image_plane_grid_stack] = identifier[grids] . identifier[GridStack] ( identifier[regular] = identifier[image_plane_grid] , identifier[sub] = identifier[np] . identifier[array] ([[ literal[int] , literal[int] ]]),
identifier[blurring] = identifier[np] . identifier[array] ([[ literal[int] , literal[int] ]]))
identifier[tracer] = identifier[TracerMultiPlanes] ( identifier[galaxies] = identifier[self] . identifier[galaxies] , identifier[image_plane_grid_stack] = identifier[image_plane_grid_stack] ,
identifier[border] = keyword[None] , identifier[cosmology] = identifier[self] . identifier[cosmology] )
keyword[for] identifier[plane_index] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[self] . identifier[plane_redshifts] )):
identifier[new_grid_stack] = identifier[image_plane_grid_stack]
keyword[if] identifier[redshift] <= identifier[tracer] . identifier[plane_redshifts] [ identifier[plane_index] ]:
keyword[if] identifier[plane_index] > literal[int] :
keyword[for] identifier[previous_plane_index] keyword[in] identifier[range] ( identifier[plane_index] ):
identifier[scaling_factor] = identifier[cosmology_util] . identifier[scaling_factor_between_redshifts_from_redshifts_and_cosmology] (
identifier[redshift_0] = identifier[tracer] . identifier[plane_redshifts] [ identifier[previous_plane_index] ], identifier[redshift_1] = identifier[redshift] ,
identifier[redshift_final] = identifier[tracer] . identifier[plane_redshifts] [- literal[int] ], identifier[cosmology] = identifier[tracer] . identifier[cosmology] )
identifier[scaled_deflection_stack] = identifier[lens_util] . identifier[scaled_deflection_stack_from_plane_and_scaling_factor] (
identifier[plane] = identifier[tracer] . identifier[planes] [ identifier[previous_plane_index] ], identifier[scaling_factor] = identifier[scaling_factor] )
identifier[new_grid_stack] = identifier[lens_util] . identifier[grid_stack_from_deflection_stack] ( identifier[grid_stack] = identifier[new_grid_stack] ,
identifier[deflection_stack] = identifier[scaled_deflection_stack] )
keyword[elif] identifier[plane_index] == literal[int] :
keyword[return] identifier[new_grid_stack] . identifier[regular]
keyword[return] identifier[new_grid_stack] . identifier[regular] | def grid_at_redshift_from_image_plane_grid_and_redshift(self, image_plane_grid, redshift):
"""For an input grid of (y,x) arc-second image-plane coordinates, ray-trace the coordinates to any redshift in the strong lens configuration.
This is performed using multi-plane ray-tracing and the existing redshifts and planes of the tracer. However, any redshift can be input even if a plane does not exist there, including redshifts before the first plane of the lensing system.
Parameters
----------
image_plane_grid : ndsrray or grids.RegularGrid
The image-plane grid which is traced to the redshift.
redshift : float
The redshift the image-plane grid is traced to.
"""
# TODO : We need to come up with a better abstraction for multi-plane lensing 0_0
image_plane_grid_stack = grids.GridStack(regular=image_plane_grid, sub=np.array([[0.0, 0.0]]), blurring=np.array([[0.0, 0.0]]))
tracer = TracerMultiPlanes(galaxies=self.galaxies, image_plane_grid_stack=image_plane_grid_stack, border=None, cosmology=self.cosmology)
for plane_index in range(0, len(self.plane_redshifts)):
new_grid_stack = image_plane_grid_stack
if redshift <= tracer.plane_redshifts[plane_index]:
# If redshift is between two planes, we need to map over all previous planes coordinates / deflections.
if plane_index > 0:
for previous_plane_index in range(plane_index):
scaling_factor = cosmology_util.scaling_factor_between_redshifts_from_redshifts_and_cosmology(redshift_0=tracer.plane_redshifts[previous_plane_index], redshift_1=redshift, redshift_final=tracer.plane_redshifts[-1], cosmology=tracer.cosmology)
scaled_deflection_stack = lens_util.scaled_deflection_stack_from_plane_and_scaling_factor(plane=tracer.planes[previous_plane_index], scaling_factor=scaling_factor)
new_grid_stack = lens_util.grid_stack_from_deflection_stack(grid_stack=new_grid_stack, deflection_stack=scaled_deflection_stack) # depends on [control=['for'], data=['previous_plane_index']] # depends on [control=['if'], data=['plane_index']]
# If redshift is before the first plane, no change to image pllane coordinates.
elif plane_index == 0:
return new_grid_stack.regular # depends on [control=['if'], data=[]]
return new_grid_stack.regular # depends on [control=['if'], data=['redshift']] # depends on [control=['for'], data=['plane_index']] |
def blk_1d(blk, shape):
"""Iterate through the slices that recover a line.
This function is used by :func:`blk_nd` as a base 1d case.
The last slice is returned even if is lesser than blk.
:param blk: the size of the block
:param shape: the size of the array
:return: a generator that yields the slices
"""
maxpix, rem = blk_coverage_1d(blk, shape)
for i in range(0, maxpix, blk):
yield slice(i, i + blk)
if rem != 0:
yield slice(maxpix, shape) | def function[blk_1d, parameter[blk, shape]]:
constant[Iterate through the slices that recover a line.
This function is used by :func:`blk_nd` as a base 1d case.
The last slice is returned even if is lesser than blk.
:param blk: the size of the block
:param shape: the size of the array
:return: a generator that yields the slices
]
<ast.Tuple object at 0x7da1b2356980> assign[=] call[name[blk_coverage_1d], parameter[name[blk], name[shape]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[maxpix], name[blk]]]] begin[:]
<ast.Yield object at 0x7da1b2357eb0>
if compare[name[rem] not_equal[!=] constant[0]] begin[:]
<ast.Yield object at 0x7da1b2357fd0> | keyword[def] identifier[blk_1d] ( identifier[blk] , identifier[shape] ):
literal[string]
identifier[maxpix] , identifier[rem] = identifier[blk_coverage_1d] ( identifier[blk] , identifier[shape] )
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[maxpix] , identifier[blk] ):
keyword[yield] identifier[slice] ( identifier[i] , identifier[i] + identifier[blk] )
keyword[if] identifier[rem] != literal[int] :
keyword[yield] identifier[slice] ( identifier[maxpix] , identifier[shape] ) | def blk_1d(blk, shape):
"""Iterate through the slices that recover a line.
This function is used by :func:`blk_nd` as a base 1d case.
The last slice is returned even if is lesser than blk.
:param blk: the size of the block
:param shape: the size of the array
:return: a generator that yields the slices
"""
(maxpix, rem) = blk_coverage_1d(blk, shape)
for i in range(0, maxpix, blk):
yield slice(i, i + blk) # depends on [control=['for'], data=['i']]
if rem != 0:
yield slice(maxpix, shape) # depends on [control=['if'], data=[]] |
def vms(nictag):
'''
List all vms connect to nictag
nictag : string
name of nictag
CLI Example:
.. code-block:: bash
salt '*' nictagadm.vms admin
'''
ret = {}
cmd = 'nictagadm vms {0}'.format(nictag)
res = __salt__['cmd.run_all'](cmd)
retcode = res['retcode']
if retcode != 0:
ret['Error'] = res['stderr'] if 'stderr' in res else 'Failed to get list of vms.'
else:
ret = res['stdout'].splitlines()
return ret | def function[vms, parameter[nictag]]:
constant[
List all vms connect to nictag
nictag : string
name of nictag
CLI Example:
.. code-block:: bash
salt '*' nictagadm.vms admin
]
variable[ret] assign[=] dictionary[[], []]
variable[cmd] assign[=] call[constant[nictagadm vms {0}].format, parameter[name[nictag]]]
variable[res] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmd]]]
variable[retcode] assign[=] call[name[res]][constant[retcode]]
if compare[name[retcode] not_equal[!=] constant[0]] begin[:]
call[name[ret]][constant[Error]] assign[=] <ast.IfExp object at 0x7da20c7cbca0>
return[name[ret]] | keyword[def] identifier[vms] ( identifier[nictag] ):
literal[string]
identifier[ret] ={}
identifier[cmd] = literal[string] . identifier[format] ( identifier[nictag] )
identifier[res] = identifier[__salt__] [ literal[string] ]( identifier[cmd] )
identifier[retcode] = identifier[res] [ literal[string] ]
keyword[if] identifier[retcode] != literal[int] :
identifier[ret] [ literal[string] ]= identifier[res] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[res] keyword[else] literal[string]
keyword[else] :
identifier[ret] = identifier[res] [ literal[string] ]. identifier[splitlines] ()
keyword[return] identifier[ret] | def vms(nictag):
"""
List all vms connect to nictag
nictag : string
name of nictag
CLI Example:
.. code-block:: bash
salt '*' nictagadm.vms admin
"""
ret = {}
cmd = 'nictagadm vms {0}'.format(nictag)
res = __salt__['cmd.run_all'](cmd)
retcode = res['retcode']
if retcode != 0:
ret['Error'] = res['stderr'] if 'stderr' in res else 'Failed to get list of vms.' # depends on [control=['if'], data=[]]
else:
ret = res['stdout'].splitlines()
return ret |
def p_mixin(self, p):
""" mixin_decl : open_mixin declaration_list brace_close
"""
self.scope.add_mixin(Mixin(list(p)[1:], p.lineno(3)).parse(self.scope))
self.scope.pop()
p[0] = None | def function[p_mixin, parameter[self, p]]:
constant[ mixin_decl : open_mixin declaration_list brace_close
]
call[name[self].scope.add_mixin, parameter[call[call[name[Mixin], parameter[call[call[name[list], parameter[name[p]]]][<ast.Slice object at 0x7da1aff8c4c0>], call[name[p].lineno, parameter[constant[3]]]]].parse, parameter[name[self].scope]]]]
call[name[self].scope.pop, parameter[]]
call[name[p]][constant[0]] assign[=] constant[None] | keyword[def] identifier[p_mixin] ( identifier[self] , identifier[p] ):
literal[string]
identifier[self] . identifier[scope] . identifier[add_mixin] ( identifier[Mixin] ( identifier[list] ( identifier[p] )[ literal[int] :], identifier[p] . identifier[lineno] ( literal[int] )). identifier[parse] ( identifier[self] . identifier[scope] ))
identifier[self] . identifier[scope] . identifier[pop] ()
identifier[p] [ literal[int] ]= keyword[None] | def p_mixin(self, p):
""" mixin_decl : open_mixin declaration_list brace_close
"""
self.scope.add_mixin(Mixin(list(p)[1:], p.lineno(3)).parse(self.scope))
self.scope.pop()
p[0] = None |
def create_module_page(mod, dest_path, force=False):
"Create the documentation notebook for module `mod_name` in path `dest_path`"
nb = get_empty_notebook()
mod_name = mod.__name__
strip_name = strip_fastai(mod_name)
init_cell = [get_md_cell(f'## Title for {strip_name} (use plain english, not module name!)'), get_md_cell('Type an introduction of the package here.')]
cells = [get_code_cell(f'from fastai.gen_doc.nbdoc import *\nfrom {mod_name} import * ', True)]
gvar_map = get_global_vars(mod)
if gvar_map: cells.append(get_md_cell('### Global Variable Definitions:'))
for name in get_exports(mod):
if name in gvar_map: cells.append(get_md_cell(gvar_map[name]))
for ft_name in get_ft_names(mod, include_inner=True):
if not hasattr(mod, ft_name):
warnings.warn(f"Module {strip_name} doesn't have a function named {ft_name}.")
continue
cells += _symbol_skeleton(ft_name)
elt = getattr(mod, ft_name)
nb['cells'] = init_cell + cells + [get_md_cell(UNDOC_HEADER)]
doc_path = get_doc_path(mod, dest_path)
write_nb(nb, doc_path, 'w' if force else 'x')
execute_nb(doc_path)
return doc_path | def function[create_module_page, parameter[mod, dest_path, force]]:
constant[Create the documentation notebook for module `mod_name` in path `dest_path`]
variable[nb] assign[=] call[name[get_empty_notebook], parameter[]]
variable[mod_name] assign[=] name[mod].__name__
variable[strip_name] assign[=] call[name[strip_fastai], parameter[name[mod_name]]]
variable[init_cell] assign[=] list[[<ast.Call object at 0x7da1b1df9b70>, <ast.Call object at 0x7da1b1e9b9d0>]]
variable[cells] assign[=] list[[<ast.Call object at 0x7da1b1e9b340>]]
variable[gvar_map] assign[=] call[name[get_global_vars], parameter[name[mod]]]
if name[gvar_map] begin[:]
call[name[cells].append, parameter[call[name[get_md_cell], parameter[constant[### Global Variable Definitions:]]]]]
for taget[name[name]] in starred[call[name[get_exports], parameter[name[mod]]]] begin[:]
if compare[name[name] in name[gvar_map]] begin[:]
call[name[cells].append, parameter[call[name[get_md_cell], parameter[call[name[gvar_map]][name[name]]]]]]
for taget[name[ft_name]] in starred[call[name[get_ft_names], parameter[name[mod]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b1e9aaa0> begin[:]
call[name[warnings].warn, parameter[<ast.JoinedStr object at 0x7da1b1e9acb0>]]
continue
<ast.AugAssign object at 0x7da1b1d6c790>
variable[elt] assign[=] call[name[getattr], parameter[name[mod], name[ft_name]]]
call[name[nb]][constant[cells]] assign[=] binary_operation[binary_operation[name[init_cell] + name[cells]] + list[[<ast.Call object at 0x7da1b1d6fb50>]]]
variable[doc_path] assign[=] call[name[get_doc_path], parameter[name[mod], name[dest_path]]]
call[name[write_nb], parameter[name[nb], name[doc_path], <ast.IfExp object at 0x7da1b1d6e200>]]
call[name[execute_nb], parameter[name[doc_path]]]
return[name[doc_path]] | keyword[def] identifier[create_module_page] ( identifier[mod] , identifier[dest_path] , identifier[force] = keyword[False] ):
literal[string]
identifier[nb] = identifier[get_empty_notebook] ()
identifier[mod_name] = identifier[mod] . identifier[__name__]
identifier[strip_name] = identifier[strip_fastai] ( identifier[mod_name] )
identifier[init_cell] =[ identifier[get_md_cell] ( literal[string] ), identifier[get_md_cell] ( literal[string] )]
identifier[cells] =[ identifier[get_code_cell] ( literal[string] , keyword[True] )]
identifier[gvar_map] = identifier[get_global_vars] ( identifier[mod] )
keyword[if] identifier[gvar_map] : identifier[cells] . identifier[append] ( identifier[get_md_cell] ( literal[string] ))
keyword[for] identifier[name] keyword[in] identifier[get_exports] ( identifier[mod] ):
keyword[if] identifier[name] keyword[in] identifier[gvar_map] : identifier[cells] . identifier[append] ( identifier[get_md_cell] ( identifier[gvar_map] [ identifier[name] ]))
keyword[for] identifier[ft_name] keyword[in] identifier[get_ft_names] ( identifier[mod] , identifier[include_inner] = keyword[True] ):
keyword[if] keyword[not] identifier[hasattr] ( identifier[mod] , identifier[ft_name] ):
identifier[warnings] . identifier[warn] ( literal[string] )
keyword[continue]
identifier[cells] += identifier[_symbol_skeleton] ( identifier[ft_name] )
identifier[elt] = identifier[getattr] ( identifier[mod] , identifier[ft_name] )
identifier[nb] [ literal[string] ]= identifier[init_cell] + identifier[cells] +[ identifier[get_md_cell] ( identifier[UNDOC_HEADER] )]
identifier[doc_path] = identifier[get_doc_path] ( identifier[mod] , identifier[dest_path] )
identifier[write_nb] ( identifier[nb] , identifier[doc_path] , literal[string] keyword[if] identifier[force] keyword[else] literal[string] )
identifier[execute_nb] ( identifier[doc_path] )
keyword[return] identifier[doc_path] | def create_module_page(mod, dest_path, force=False):
"""Create the documentation notebook for module `mod_name` in path `dest_path`"""
nb = get_empty_notebook()
mod_name = mod.__name__
strip_name = strip_fastai(mod_name)
init_cell = [get_md_cell(f'## Title for {strip_name} (use plain english, not module name!)'), get_md_cell('Type an introduction of the package here.')]
cells = [get_code_cell(f'from fastai.gen_doc.nbdoc import *\nfrom {mod_name} import * ', True)]
gvar_map = get_global_vars(mod)
if gvar_map:
cells.append(get_md_cell('### Global Variable Definitions:')) # depends on [control=['if'], data=[]]
for name in get_exports(mod):
if name in gvar_map:
cells.append(get_md_cell(gvar_map[name])) # depends on [control=['if'], data=['name', 'gvar_map']] # depends on [control=['for'], data=['name']]
for ft_name in get_ft_names(mod, include_inner=True):
if not hasattr(mod, ft_name):
warnings.warn(f"Module {strip_name} doesn't have a function named {ft_name}.")
continue # depends on [control=['if'], data=[]]
cells += _symbol_skeleton(ft_name)
elt = getattr(mod, ft_name) # depends on [control=['for'], data=['ft_name']]
nb['cells'] = init_cell + cells + [get_md_cell(UNDOC_HEADER)]
doc_path = get_doc_path(mod, dest_path)
write_nb(nb, doc_path, 'w' if force else 'x')
execute_nb(doc_path)
return doc_path |
def serialize_hook(instance):
"""
Serialize the object down to Python primitives.
By default it uses Django's built in serializer.
"""
if getattr(instance, 'serialize_hook', None) and callable(instance.serialize_hook):
return instance.serialize_hook(hook=instance)
if getattr(settings, 'HOOK_SERIALIZER', None):
serializer = get_module(settings.HOOK_SERIALIZER)
return serializer(instance, hook=instance)
# if no user defined serializers, fallback to the django builtin!
return {
'hook': instance.dict(),
'data': serializers.serialize('python', [instance])[0]
} | def function[serialize_hook, parameter[instance]]:
constant[
Serialize the object down to Python primitives.
By default it uses Django's built in serializer.
]
if <ast.BoolOp object at 0x7da1b0f3b3d0> begin[:]
return[call[name[instance].serialize_hook, parameter[]]]
if call[name[getattr], parameter[name[settings], constant[HOOK_SERIALIZER], constant[None]]] begin[:]
variable[serializer] assign[=] call[name[get_module], parameter[name[settings].HOOK_SERIALIZER]]
return[call[name[serializer], parameter[name[instance]]]]
return[dictionary[[<ast.Constant object at 0x7da1b0d521d0>, <ast.Constant object at 0x7da1b0d53fa0>], [<ast.Call object at 0x7da1b0d539d0>, <ast.Subscript object at 0x7da1b0d500a0>]]] | keyword[def] identifier[serialize_hook] ( identifier[instance] ):
literal[string]
keyword[if] identifier[getattr] ( identifier[instance] , literal[string] , keyword[None] ) keyword[and] identifier[callable] ( identifier[instance] . identifier[serialize_hook] ):
keyword[return] identifier[instance] . identifier[serialize_hook] ( identifier[hook] = identifier[instance] )
keyword[if] identifier[getattr] ( identifier[settings] , literal[string] , keyword[None] ):
identifier[serializer] = identifier[get_module] ( identifier[settings] . identifier[HOOK_SERIALIZER] )
keyword[return] identifier[serializer] ( identifier[instance] , identifier[hook] = identifier[instance] )
keyword[return] {
literal[string] : identifier[instance] . identifier[dict] (),
literal[string] : identifier[serializers] . identifier[serialize] ( literal[string] ,[ identifier[instance] ])[ literal[int] ]
} | def serialize_hook(instance):
"""
Serialize the object down to Python primitives.
By default it uses Django's built in serializer.
"""
if getattr(instance, 'serialize_hook', None) and callable(instance.serialize_hook):
return instance.serialize_hook(hook=instance) # depends on [control=['if'], data=[]]
if getattr(settings, 'HOOK_SERIALIZER', None):
serializer = get_module(settings.HOOK_SERIALIZER)
return serializer(instance, hook=instance) # depends on [control=['if'], data=[]]
# if no user defined serializers, fallback to the django builtin!
return {'hook': instance.dict(), 'data': serializers.serialize('python', [instance])[0]} |
def unpack_ihex(record):
"""Unpack given Intel HEX record into variables.
"""
# Minimum :SSAAAATTCC, where SS is size, AAAA is address, TT is
# type and CC is crc.
if len(record) < 11:
raise Error("record '{}' too short".format(record))
if record[0] != ':':
raise Error("record '{}' not starting with a ':'".format(record))
size = int(record[1:3], 16)
address = int(record[3:7], 16)
type_ = int(record[7:9], 16)
if size > 0:
data = binascii.unhexlify(record[9:9 + 2 * size])
else:
data = b''
actual_crc = int(record[9 + 2 * size:], 16)
expected_crc = crc_ihex(record[1:9 + 2 * size])
if actual_crc != expected_crc:
raise Error(
"expected crc '{:02X}' in record {}, but got '{:02X}'".format(
expected_crc,
record,
actual_crc))
return (type_, address, size, data) | def function[unpack_ihex, parameter[record]]:
constant[Unpack given Intel HEX record into variables.
]
if compare[call[name[len], parameter[name[record]]] less[<] constant[11]] begin[:]
<ast.Raise object at 0x7da18f58ebf0>
if compare[call[name[record]][constant[0]] not_equal[!=] constant[:]] begin[:]
<ast.Raise object at 0x7da18f58f040>
variable[size] assign[=] call[name[int], parameter[call[name[record]][<ast.Slice object at 0x7da18f58ef20>], constant[16]]]
variable[address] assign[=] call[name[int], parameter[call[name[record]][<ast.Slice object at 0x7da18f58f3d0>], constant[16]]]
variable[type_] assign[=] call[name[int], parameter[call[name[record]][<ast.Slice object at 0x7da18f58d150>], constant[16]]]
if compare[name[size] greater[>] constant[0]] begin[:]
variable[data] assign[=] call[name[binascii].unhexlify, parameter[call[name[record]][<ast.Slice object at 0x7da18f58d900>]]]
variable[actual_crc] assign[=] call[name[int], parameter[call[name[record]][<ast.Slice object at 0x7da18f58eb90>], constant[16]]]
variable[expected_crc] assign[=] call[name[crc_ihex], parameter[call[name[record]][<ast.Slice object at 0x7da18f58cfa0>]]]
if compare[name[actual_crc] not_equal[!=] name[expected_crc]] begin[:]
<ast.Raise object at 0x7da18f58dc00>
return[tuple[[<ast.Name object at 0x7da18f58da50>, <ast.Name object at 0x7da18f58d660>, <ast.Name object at 0x7da18f58e020>, <ast.Name object at 0x7da18f58ca90>]]] | keyword[def] identifier[unpack_ihex] ( identifier[record] ):
literal[string]
keyword[if] identifier[len] ( identifier[record] )< literal[int] :
keyword[raise] identifier[Error] ( literal[string] . identifier[format] ( identifier[record] ))
keyword[if] identifier[record] [ literal[int] ]!= literal[string] :
keyword[raise] identifier[Error] ( literal[string] . identifier[format] ( identifier[record] ))
identifier[size] = identifier[int] ( identifier[record] [ literal[int] : literal[int] ], literal[int] )
identifier[address] = identifier[int] ( identifier[record] [ literal[int] : literal[int] ], literal[int] )
identifier[type_] = identifier[int] ( identifier[record] [ literal[int] : literal[int] ], literal[int] )
keyword[if] identifier[size] > literal[int] :
identifier[data] = identifier[binascii] . identifier[unhexlify] ( identifier[record] [ literal[int] : literal[int] + literal[int] * identifier[size] ])
keyword[else] :
identifier[data] = literal[string]
identifier[actual_crc] = identifier[int] ( identifier[record] [ literal[int] + literal[int] * identifier[size] :], literal[int] )
identifier[expected_crc] = identifier[crc_ihex] ( identifier[record] [ literal[int] : literal[int] + literal[int] * identifier[size] ])
keyword[if] identifier[actual_crc] != identifier[expected_crc] :
keyword[raise] identifier[Error] (
literal[string] . identifier[format] (
identifier[expected_crc] ,
identifier[record] ,
identifier[actual_crc] ))
keyword[return] ( identifier[type_] , identifier[address] , identifier[size] , identifier[data] ) | def unpack_ihex(record):
"""Unpack given Intel HEX record into variables.
"""
# Minimum :SSAAAATTCC, where SS is size, AAAA is address, TT is
# type and CC is crc.
if len(record) < 11:
raise Error("record '{}' too short".format(record)) # depends on [control=['if'], data=[]]
if record[0] != ':':
raise Error("record '{}' not starting with a ':'".format(record)) # depends on [control=['if'], data=[]]
size = int(record[1:3], 16)
address = int(record[3:7], 16)
type_ = int(record[7:9], 16)
if size > 0:
data = binascii.unhexlify(record[9:9 + 2 * size]) # depends on [control=['if'], data=['size']]
else:
data = b''
actual_crc = int(record[9 + 2 * size:], 16)
expected_crc = crc_ihex(record[1:9 + 2 * size])
if actual_crc != expected_crc:
raise Error("expected crc '{:02X}' in record {}, but got '{:02X}'".format(expected_crc, record, actual_crc)) # depends on [control=['if'], data=['actual_crc', 'expected_crc']]
return (type_, address, size, data) |
def diff(self, **kwargs):
"""Generate the commit diff.
Args:
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabGetError: If the diff could not be retrieved
Returns:
list: The changes done in this commit
"""
path = '%s/%s/diff' % (self.manager.path, self.get_id())
return self.manager.gitlab.http_get(path, **kwargs) | def function[diff, parameter[self]]:
constant[Generate the commit diff.
Args:
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabGetError: If the diff could not be retrieved
Returns:
list: The changes done in this commit
]
variable[path] assign[=] binary_operation[constant[%s/%s/diff] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b26ae950>, <ast.Call object at 0x7da1b26ade10>]]]
return[call[name[self].manager.gitlab.http_get, parameter[name[path]]]] | keyword[def] identifier[diff] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[path] = literal[string] %( identifier[self] . identifier[manager] . identifier[path] , identifier[self] . identifier[get_id] ())
keyword[return] identifier[self] . identifier[manager] . identifier[gitlab] . identifier[http_get] ( identifier[path] ,** identifier[kwargs] ) | def diff(self, **kwargs):
"""Generate the commit diff.
Args:
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabGetError: If the diff could not be retrieved
Returns:
list: The changes done in this commit
"""
path = '%s/%s/diff' % (self.manager.path, self.get_id())
return self.manager.gitlab.http_get(path, **kwargs) |
def publishfromconfig(self, configFiles, combinedApp=None, dateTimeFormat=None):
"""Parses a JSON configuration file to publish data.
Args:
configFiles (list): A list of JSON files on disk containing
configuration data for publishing.
combinedApp (str): A JSON file on disk containing configuration data
for app publishing. Defaults to ``None``.
dateTimeFormat (str): A valid date formatting directive, as understood
by :py:meth:`datetime.datetime.strftime`. Defaults to ``None``, i.e.,
``'%Y-%m-%d %H:%M'``.
"""
publishTools = None
webmaps = None
config = None
resultsItems = None
resultFS = None
resultMaps = None
resultApps = None
combinedResults = None
if dateTimeFormat is None:
dateTimeFormat = '%Y-%m-%d %H:%M'
scriptStartTime = datetime.datetime.now()
try:
webmaps = []
print ("********************Script Started********************")
print ("Script started at %s" % scriptStartTime.strftime(dateTimeFormat))
# start report processing (moved out from under ArcREST logic. no AGO crednetials needed to run reports)
for configFile in configFiles:
config = common.init_config_json(config_file=configFile)
if config is not None:
if 'ReportDetails' in config:
if reportToolsInstalled == False:
print ("Report section is included in the config file but the solutionreporttools cannot be located")
else:
reportConfig = config['ReportDetails']
# This code checks to see if you want to export the data from SDE to a local GDB. The parameter is set in config file.
# Could be performance gain to run locally. If you choose this option, both the report and the data prep in memory config
# are modified so they can point to the local temp location.
if 'RunReport' in reportConfig and (str(reportConfig['RunReport']).upper() =="TRUE" or str(reportConfig['RunReport']).upper() =="YES"):
reportConfig = ReportTools.reportDataPrep(reportConfig)
print ("-----Report Section Starting-----")
startTime = datetime.datetime.now()
print ("Processing reports in config %s, starting at: %s" % (configFile,startTime.strftime(dateTimeFormat)))
ReportTools.create_report_layers_using_config(config=reportConfig)
print ("Reports in config %s completed, time to complete: %s" % (configFile, str(datetime.datetime.now() - startTime)))
print ("-----Report Section Complete-----")
if 'PublishingDetails' in config:
publishingConfig = config['PublishingDetails']
if 'PublishData' in publishingConfig:
publishData = publishingConfig['PublishData']
else:
print ("PublishingDetails is missing the PublishData parameter: type string, values, True or False")
publishData = 'TRUE'
if (str(publishData).upper() =="TRUE" or str(publishData).upper() =="YES"):
print (" ")
print ("-----Publishing Section Starting-----")
startTime = datetime.datetime.now()
print ("Processing publishing in config %s, starting at: %s" % (configFile,startTime.strftime(dateTimeFormat)))
publishTools = publishingtools.publishingtools(securityinfo=self)
if publishTools.valid == False :
print ("Error creating publishing tools: %s" % publishTools.message)
else:
print ("Publishing tools created: %s" % publishTools.message)
resultFS = []
if 'Items' in publishingConfig:
startSectTime = datetime.datetime.now()
print (" ")
print ("Creating Items: %s" % str(startSectTime.strftime(dateTimeFormat)))
resultsItems = publishTools.publishItems(items_info=publishingConfig['Items'])
print ("Items created, time to complete: %s" % str(datetime.datetime.now() - startSectTime))
if 'FeatureCollections' in publishingConfig:
startSectTime = datetime.datetime.now()
print (" ")
print ("Creating Feature Collection: %s" % str(startSectTime.strftime(dateTimeFormat)))
resultFS = publishTools.publishFeatureCollections(configs=publishingConfig['FeatureCollections'])
print ("Feature Collection published, time to complete: %s" % str(datetime.datetime.now() - startSectTime))
if 'FeatureServices' in publishingConfig:
startSectTime = datetime.datetime.now()
print (" ")
print ("Creating Feature Services: %s" % str(startSectTime.strftime(dateTimeFormat)))
res = publishTools.publishFsFromMXD(fs_config=publishingConfig['FeatureServices'])
if res is None:
return
resultFS = resultFS + res
if len(resultFS) == 0:
print ("Exiting, error creating feature services")
return
print ("Feature Services published, time to complete: %s" % str(datetime.datetime.now() - startSectTime))
if 'ExistingServices' in publishingConfig:
startSectTime = datetime.datetime.now()
print (" ")
print ("Updating Existing Feature Services: %s" % str(startSectTime.strftime(dateTimeFormat)))
resultES = publishTools.updateFeatureService(efs_config=publishingConfig['ExistingServices'])
print ("Updating Existing Feature Services completed, time to complete: %s" % str(datetime.datetime.now() - startSectTime))
if 'MapDetails' in publishingConfig:
startSectTime = datetime.datetime.now()
print (" ")
print ("Creating maps: %s" % str(startSectTime.strftime(dateTimeFormat)))
resultMaps = publishTools.publishMap(maps_info=publishingConfig['MapDetails'],fsInfo=resultFS,itInfo=resultsItems)
for maps in resultMaps:
if 'MapInfo' in maps:
if 'Results' in maps['MapInfo']:
if 'itemId' in maps['MapInfo']['Results']:
webmaps.append(maps['MapInfo']['Results']['itemId'])
print ("Creating maps completed, time to complete: %s" % str(datetime.datetime.now() - startSectTime))
if 'AppDetails' in publishingConfig:
startSectTime = datetime.datetime.now()
print (" ")
print ("Creating apps: %s" % str(startSectTime.strftime(dateTimeFormat)))
resultApps = publishTools.publishApp(app_info=publishingConfig['AppDetails'],map_info=resultMaps,fsInfo=resultFS)
print ("Creating apps completed, time to complete: %s" % str(datetime.datetime.now() - startSectTime))
print ("Publishing complete in config %s completed, time to complete: %s" % (configFile, str(datetime.datetime.now() - startTime)))
print ("-----Publishing Section Complete-----")
else:
print ("Config %s not found" % configFile)
if combinedApp:
if os.path.exists(combinedApp):
print (" ")
startSectTime = datetime.datetime.now()
print ("Creating combined result: %s" % str(startSectTime.strftime(dateTimeFormat)))
config = common.init_config_json(config_file=combinedApp)
combinedResults = publishTools.publishCombinedWebMap(maps_info=config['PublishingDetails']['MapDetails'],webmaps=webmaps)
if 'PublishingDetails' in config:
publishingConfig = config['PublishingDetails']
if 'PublishData' in publishingConfig:
publishData = publishingConfig['PublishData']
else:
print ("PublishingDetails is missing the PublishData parameter: type string, values, True or False")
publishData = 'TRUE'
if (str(publishData).upper() =="TRUE" or str(publishData).upper() =="YES"):
if 'AppDetails' in publishingConfig:
resultApps = publishTools.publishApp(app_info=publishingConfig['AppDetails'],map_info=combinedResults)
print ("Creating combind result completed, time to complete: %s" % str(datetime.datetime.now() - startSectTime))
except(TypeError,ValueError,AttributeError) as e:
print (e)
except (common.ArcRestHelperError) as e:
print ("error in function: %s" % e[0]['function'])
print ("error on line: %s" % e[0]['line'])
print ("error in file name: %s" % e[0]['filename'])
print ("with error message: %s" % e[0]['synerror'])
if 'arcpyError' in e[0]:
print ("with arcpy message: %s" % e[0]['arcpyError'])
except Exception as e:
if (reportToolsInstalled):
if isinstance(e,(ReportTools.ReportToolsError,DataPrep.DataPrepError)):
print ("error in function: %s" % e[0]['function'])
print ("error on line: %s" % e[0]['line'])
print ("error in file name: %s" % e[0]['filename'])
print ("with error message: %s" % e[0]['synerror'])
if 'arcpyError' in e[0]:
print ("with arcpy message: %s" % e[0]['arcpyError'])
else:
line, filename, synerror = trace()
print ("error on line: %s" % line)
print ("error in file name: %s" % filename)
print ("with error message: %s" % synerror)
else:
line, filename, synerror = trace()
print ("error on line: %s" % line)
print ("error in file name: %s" % filename)
print ("with error message: %s" % synerror)
finally:
print ("Script complete, time to complete: %s" % str(datetime.datetime.now() - scriptStartTime))
print ("###############Script Completed#################")
print ("")
if publishTools is not None:
publishTools.dispose()
publishTools = None
webmaps = None
config = None
resultFS = None
resultsItems = None
resultMaps = None
resultApps = None
combinedResults = None
del publishTools
del webmaps
del config
del resultFS
del resultMaps
del resultApps
del combinedResults
del resultsItems
gc.collect() | def function[publishfromconfig, parameter[self, configFiles, combinedApp, dateTimeFormat]]:
constant[Parses a JSON configuration file to publish data.
Args:
configFiles (list): A list of JSON files on disk containing
configuration data for publishing.
combinedApp (str): A JSON file on disk containing configuration data
for app publishing. Defaults to ``None``.
dateTimeFormat (str): A valid date formatting directive, as understood
by :py:meth:`datetime.datetime.strftime`. Defaults to ``None``, i.e.,
``'%Y-%m-%d %H:%M'``.
]
variable[publishTools] assign[=] constant[None]
variable[webmaps] assign[=] constant[None]
variable[config] assign[=] constant[None]
variable[resultsItems] assign[=] constant[None]
variable[resultFS] assign[=] constant[None]
variable[resultMaps] assign[=] constant[None]
variable[resultApps] assign[=] constant[None]
variable[combinedResults] assign[=] constant[None]
if compare[name[dateTimeFormat] is constant[None]] begin[:]
variable[dateTimeFormat] assign[=] constant[%Y-%m-%d %H:%M]
variable[scriptStartTime] assign[=] call[name[datetime].datetime.now, parameter[]]
<ast.Try object at 0x7da1b128a2f0> | keyword[def] identifier[publishfromconfig] ( identifier[self] , identifier[configFiles] , identifier[combinedApp] = keyword[None] , identifier[dateTimeFormat] = keyword[None] ):
literal[string]
identifier[publishTools] = keyword[None]
identifier[webmaps] = keyword[None]
identifier[config] = keyword[None]
identifier[resultsItems] = keyword[None]
identifier[resultFS] = keyword[None]
identifier[resultMaps] = keyword[None]
identifier[resultApps] = keyword[None]
identifier[combinedResults] = keyword[None]
keyword[if] identifier[dateTimeFormat] keyword[is] keyword[None] :
identifier[dateTimeFormat] = literal[string]
identifier[scriptStartTime] = identifier[datetime] . identifier[datetime] . identifier[now] ()
keyword[try] :
identifier[webmaps] =[]
identifier[print] ( literal[string] )
identifier[print] ( literal[string] % identifier[scriptStartTime] . identifier[strftime] ( identifier[dateTimeFormat] ))
keyword[for] identifier[configFile] keyword[in] identifier[configFiles] :
identifier[config] = identifier[common] . identifier[init_config_json] ( identifier[config_file] = identifier[configFile] )
keyword[if] identifier[config] keyword[is] keyword[not] keyword[None] :
keyword[if] literal[string] keyword[in] identifier[config] :
keyword[if] identifier[reportToolsInstalled] == keyword[False] :
identifier[print] ( literal[string] )
keyword[else] :
identifier[reportConfig] = identifier[config] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[reportConfig] keyword[and] ( identifier[str] ( identifier[reportConfig] [ literal[string] ]). identifier[upper] ()== literal[string] keyword[or] identifier[str] ( identifier[reportConfig] [ literal[string] ]). identifier[upper] ()== literal[string] ):
identifier[reportConfig] = identifier[ReportTools] . identifier[reportDataPrep] ( identifier[reportConfig] )
identifier[print] ( literal[string] )
identifier[startTime] = identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[print] ( literal[string] %( identifier[configFile] , identifier[startTime] . identifier[strftime] ( identifier[dateTimeFormat] )))
identifier[ReportTools] . identifier[create_report_layers_using_config] ( identifier[config] = identifier[reportConfig] )
identifier[print] ( literal[string] %( identifier[configFile] , identifier[str] ( identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[startTime] )))
identifier[print] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[config] :
identifier[publishingConfig] = identifier[config] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[publishingConfig] :
identifier[publishData] = identifier[publishingConfig] [ literal[string] ]
keyword[else] :
identifier[print] ( literal[string] )
identifier[publishData] = literal[string]
keyword[if] ( identifier[str] ( identifier[publishData] ). identifier[upper] ()== literal[string] keyword[or] identifier[str] ( identifier[publishData] ). identifier[upper] ()== literal[string] ):
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[startTime] = identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[print] ( literal[string] %( identifier[configFile] , identifier[startTime] . identifier[strftime] ( identifier[dateTimeFormat] )))
identifier[publishTools] = identifier[publishingtools] . identifier[publishingtools] ( identifier[securityinfo] = identifier[self] )
keyword[if] identifier[publishTools] . identifier[valid] == keyword[False] :
identifier[print] ( literal[string] % identifier[publishTools] . identifier[message] )
keyword[else] :
identifier[print] ( literal[string] % identifier[publishTools] . identifier[message] )
identifier[resultFS] =[]
keyword[if] literal[string] keyword[in] identifier[publishingConfig] :
identifier[startSectTime] = identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[print] ( literal[string] )
identifier[print] ( literal[string] % identifier[str] ( identifier[startSectTime] . identifier[strftime] ( identifier[dateTimeFormat] )))
identifier[resultsItems] = identifier[publishTools] . identifier[publishItems] ( identifier[items_info] = identifier[publishingConfig] [ literal[string] ])
identifier[print] ( literal[string] % identifier[str] ( identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[startSectTime] ))
keyword[if] literal[string] keyword[in] identifier[publishingConfig] :
identifier[startSectTime] = identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[print] ( literal[string] )
identifier[print] ( literal[string] % identifier[str] ( identifier[startSectTime] . identifier[strftime] ( identifier[dateTimeFormat] )))
identifier[resultFS] = identifier[publishTools] . identifier[publishFeatureCollections] ( identifier[configs] = identifier[publishingConfig] [ literal[string] ])
identifier[print] ( literal[string] % identifier[str] ( identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[startSectTime] ))
keyword[if] literal[string] keyword[in] identifier[publishingConfig] :
identifier[startSectTime] = identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[print] ( literal[string] )
identifier[print] ( literal[string] % identifier[str] ( identifier[startSectTime] . identifier[strftime] ( identifier[dateTimeFormat] )))
identifier[res] = identifier[publishTools] . identifier[publishFsFromMXD] ( identifier[fs_config] = identifier[publishingConfig] [ literal[string] ])
keyword[if] identifier[res] keyword[is] keyword[None] :
keyword[return]
identifier[resultFS] = identifier[resultFS] + identifier[res]
keyword[if] identifier[len] ( identifier[resultFS] )== literal[int] :
identifier[print] ( literal[string] )
keyword[return]
identifier[print] ( literal[string] % identifier[str] ( identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[startSectTime] ))
keyword[if] literal[string] keyword[in] identifier[publishingConfig] :
identifier[startSectTime] = identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[print] ( literal[string] )
identifier[print] ( literal[string] % identifier[str] ( identifier[startSectTime] . identifier[strftime] ( identifier[dateTimeFormat] )))
identifier[resultES] = identifier[publishTools] . identifier[updateFeatureService] ( identifier[efs_config] = identifier[publishingConfig] [ literal[string] ])
identifier[print] ( literal[string] % identifier[str] ( identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[startSectTime] ))
keyword[if] literal[string] keyword[in] identifier[publishingConfig] :
identifier[startSectTime] = identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[print] ( literal[string] )
identifier[print] ( literal[string] % identifier[str] ( identifier[startSectTime] . identifier[strftime] ( identifier[dateTimeFormat] )))
identifier[resultMaps] = identifier[publishTools] . identifier[publishMap] ( identifier[maps_info] = identifier[publishingConfig] [ literal[string] ], identifier[fsInfo] = identifier[resultFS] , identifier[itInfo] = identifier[resultsItems] )
keyword[for] identifier[maps] keyword[in] identifier[resultMaps] :
keyword[if] literal[string] keyword[in] identifier[maps] :
keyword[if] literal[string] keyword[in] identifier[maps] [ literal[string] ]:
keyword[if] literal[string] keyword[in] identifier[maps] [ literal[string] ][ literal[string] ]:
identifier[webmaps] . identifier[append] ( identifier[maps] [ literal[string] ][ literal[string] ][ literal[string] ])
identifier[print] ( literal[string] % identifier[str] ( identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[startSectTime] ))
keyword[if] literal[string] keyword[in] identifier[publishingConfig] :
identifier[startSectTime] = identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[print] ( literal[string] )
identifier[print] ( literal[string] % identifier[str] ( identifier[startSectTime] . identifier[strftime] ( identifier[dateTimeFormat] )))
identifier[resultApps] = identifier[publishTools] . identifier[publishApp] ( identifier[app_info] = identifier[publishingConfig] [ literal[string] ], identifier[map_info] = identifier[resultMaps] , identifier[fsInfo] = identifier[resultFS] )
identifier[print] ( literal[string] % identifier[str] ( identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[startSectTime] ))
identifier[print] ( literal[string] %( identifier[configFile] , identifier[str] ( identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[startTime] )))
identifier[print] ( literal[string] )
keyword[else] :
identifier[print] ( literal[string] % identifier[configFile] )
keyword[if] identifier[combinedApp] :
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[combinedApp] ):
identifier[print] ( literal[string] )
identifier[startSectTime] = identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[print] ( literal[string] % identifier[str] ( identifier[startSectTime] . identifier[strftime] ( identifier[dateTimeFormat] )))
identifier[config] = identifier[common] . identifier[init_config_json] ( identifier[config_file] = identifier[combinedApp] )
identifier[combinedResults] = identifier[publishTools] . identifier[publishCombinedWebMap] ( identifier[maps_info] = identifier[config] [ literal[string] ][ literal[string] ], identifier[webmaps] = identifier[webmaps] )
keyword[if] literal[string] keyword[in] identifier[config] :
identifier[publishingConfig] = identifier[config] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[publishingConfig] :
identifier[publishData] = identifier[publishingConfig] [ literal[string] ]
keyword[else] :
identifier[print] ( literal[string] )
identifier[publishData] = literal[string]
keyword[if] ( identifier[str] ( identifier[publishData] ). identifier[upper] ()== literal[string] keyword[or] identifier[str] ( identifier[publishData] ). identifier[upper] ()== literal[string] ):
keyword[if] literal[string] keyword[in] identifier[publishingConfig] :
identifier[resultApps] = identifier[publishTools] . identifier[publishApp] ( identifier[app_info] = identifier[publishingConfig] [ literal[string] ], identifier[map_info] = identifier[combinedResults] )
identifier[print] ( literal[string] % identifier[str] ( identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[startSectTime] ))
keyword[except] ( identifier[TypeError] , identifier[ValueError] , identifier[AttributeError] ) keyword[as] identifier[e] :
identifier[print] ( identifier[e] )
keyword[except] ( identifier[common] . identifier[ArcRestHelperError] ) keyword[as] identifier[e] :
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[e] [ literal[int] ]:
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] ( identifier[reportToolsInstalled] ):
keyword[if] identifier[isinstance] ( identifier[e] ,( identifier[ReportTools] . identifier[ReportToolsError] , identifier[DataPrep] . identifier[DataPrepError] )):
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[e] [ literal[int] ]:
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
keyword[else] :
identifier[line] , identifier[filename] , identifier[synerror] = identifier[trace] ()
identifier[print] ( literal[string] % identifier[line] )
identifier[print] ( literal[string] % identifier[filename] )
identifier[print] ( literal[string] % identifier[synerror] )
keyword[else] :
identifier[line] , identifier[filename] , identifier[synerror] = identifier[trace] ()
identifier[print] ( literal[string] % identifier[line] )
identifier[print] ( literal[string] % identifier[filename] )
identifier[print] ( literal[string] % identifier[synerror] )
keyword[finally] :
identifier[print] ( literal[string] % identifier[str] ( identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[scriptStartTime] ))
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
keyword[if] identifier[publishTools] keyword[is] keyword[not] keyword[None] :
identifier[publishTools] . identifier[dispose] ()
identifier[publishTools] = keyword[None]
identifier[webmaps] = keyword[None]
identifier[config] = keyword[None]
identifier[resultFS] = keyword[None]
identifier[resultsItems] = keyword[None]
identifier[resultMaps] = keyword[None]
identifier[resultApps] = keyword[None]
identifier[combinedResults] = keyword[None]
keyword[del] identifier[publishTools]
keyword[del] identifier[webmaps]
keyword[del] identifier[config]
keyword[del] identifier[resultFS]
keyword[del] identifier[resultMaps]
keyword[del] identifier[resultApps]
keyword[del] identifier[combinedResults]
keyword[del] identifier[resultsItems]
identifier[gc] . identifier[collect] () | def publishfromconfig(self, configFiles, combinedApp=None, dateTimeFormat=None):
"""Parses a JSON configuration file to publish data.
Args:
configFiles (list): A list of JSON files on disk containing
configuration data for publishing.
combinedApp (str): A JSON file on disk containing configuration data
for app publishing. Defaults to ``None``.
dateTimeFormat (str): A valid date formatting directive, as understood
by :py:meth:`datetime.datetime.strftime`. Defaults to ``None``, i.e.,
``'%Y-%m-%d %H:%M'``.
"""
publishTools = None
webmaps = None
config = None
resultsItems = None
resultFS = None
resultMaps = None
resultApps = None
combinedResults = None
if dateTimeFormat is None:
dateTimeFormat = '%Y-%m-%d %H:%M' # depends on [control=['if'], data=['dateTimeFormat']]
scriptStartTime = datetime.datetime.now()
try:
webmaps = []
print('********************Script Started********************')
print('Script started at %s' % scriptStartTime.strftime(dateTimeFormat))
# start report processing (moved out from under ArcREST logic. no AGO crednetials needed to run reports)
for configFile in configFiles:
config = common.init_config_json(config_file=configFile)
if config is not None:
if 'ReportDetails' in config:
if reportToolsInstalled == False:
print('Report section is included in the config file but the solutionreporttools cannot be located') # depends on [control=['if'], data=[]]
else:
reportConfig = config['ReportDetails']
# This code checks to see if you want to export the data from SDE to a local GDB. The parameter is set in config file.
# Could be performance gain to run locally. If you choose this option, both the report and the data prep in memory config
# are modified so they can point to the local temp location.
if 'RunReport' in reportConfig and (str(reportConfig['RunReport']).upper() == 'TRUE' or str(reportConfig['RunReport']).upper() == 'YES'):
reportConfig = ReportTools.reportDataPrep(reportConfig)
print('-----Report Section Starting-----')
startTime = datetime.datetime.now()
print('Processing reports in config %s, starting at: %s' % (configFile, startTime.strftime(dateTimeFormat)))
ReportTools.create_report_layers_using_config(config=reportConfig)
print('Reports in config %s completed, time to complete: %s' % (configFile, str(datetime.datetime.now() - startTime)))
print('-----Report Section Complete-----') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['config']]
if 'PublishingDetails' in config:
publishingConfig = config['PublishingDetails']
if 'PublishData' in publishingConfig:
publishData = publishingConfig['PublishData'] # depends on [control=['if'], data=['publishingConfig']]
else:
print('PublishingDetails is missing the PublishData parameter: type string, values, True or False')
publishData = 'TRUE'
if str(publishData).upper() == 'TRUE' or str(publishData).upper() == 'YES':
print(' ')
print('-----Publishing Section Starting-----')
startTime = datetime.datetime.now()
print('Processing publishing in config %s, starting at: %s' % (configFile, startTime.strftime(dateTimeFormat)))
publishTools = publishingtools.publishingtools(securityinfo=self)
if publishTools.valid == False:
print('Error creating publishing tools: %s' % publishTools.message) # depends on [control=['if'], data=[]]
else:
print('Publishing tools created: %s' % publishTools.message)
resultFS = []
if 'Items' in publishingConfig:
startSectTime = datetime.datetime.now()
print(' ')
print('Creating Items: %s' % str(startSectTime.strftime(dateTimeFormat)))
resultsItems = publishTools.publishItems(items_info=publishingConfig['Items'])
print('Items created, time to complete: %s' % str(datetime.datetime.now() - startSectTime)) # depends on [control=['if'], data=['publishingConfig']]
if 'FeatureCollections' in publishingConfig:
startSectTime = datetime.datetime.now()
print(' ')
print('Creating Feature Collection: %s' % str(startSectTime.strftime(dateTimeFormat)))
resultFS = publishTools.publishFeatureCollections(configs=publishingConfig['FeatureCollections'])
print('Feature Collection published, time to complete: %s' % str(datetime.datetime.now() - startSectTime)) # depends on [control=['if'], data=['publishingConfig']]
if 'FeatureServices' in publishingConfig:
startSectTime = datetime.datetime.now()
print(' ')
print('Creating Feature Services: %s' % str(startSectTime.strftime(dateTimeFormat)))
res = publishTools.publishFsFromMXD(fs_config=publishingConfig['FeatureServices'])
if res is None:
return # depends on [control=['if'], data=[]]
resultFS = resultFS + res
if len(resultFS) == 0:
print('Exiting, error creating feature services')
return # depends on [control=['if'], data=[]]
print('Feature Services published, time to complete: %s' % str(datetime.datetime.now() - startSectTime)) # depends on [control=['if'], data=['publishingConfig']]
if 'ExistingServices' in publishingConfig:
startSectTime = datetime.datetime.now()
print(' ')
print('Updating Existing Feature Services: %s' % str(startSectTime.strftime(dateTimeFormat)))
resultES = publishTools.updateFeatureService(efs_config=publishingConfig['ExistingServices'])
print('Updating Existing Feature Services completed, time to complete: %s' % str(datetime.datetime.now() - startSectTime)) # depends on [control=['if'], data=['publishingConfig']]
if 'MapDetails' in publishingConfig:
startSectTime = datetime.datetime.now()
print(' ')
print('Creating maps: %s' % str(startSectTime.strftime(dateTimeFormat)))
resultMaps = publishTools.publishMap(maps_info=publishingConfig['MapDetails'], fsInfo=resultFS, itInfo=resultsItems)
for maps in resultMaps:
if 'MapInfo' in maps:
if 'Results' in maps['MapInfo']:
if 'itemId' in maps['MapInfo']['Results']:
webmaps.append(maps['MapInfo']['Results']['itemId']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['maps']] # depends on [control=['for'], data=['maps']]
print('Creating maps completed, time to complete: %s' % str(datetime.datetime.now() - startSectTime)) # depends on [control=['if'], data=['publishingConfig']]
if 'AppDetails' in publishingConfig:
startSectTime = datetime.datetime.now()
print(' ')
print('Creating apps: %s' % str(startSectTime.strftime(dateTimeFormat)))
resultApps = publishTools.publishApp(app_info=publishingConfig['AppDetails'], map_info=resultMaps, fsInfo=resultFS)
print('Creating apps completed, time to complete: %s' % str(datetime.datetime.now() - startSectTime)) # depends on [control=['if'], data=['publishingConfig']]
print('Publishing complete in config %s completed, time to complete: %s' % (configFile, str(datetime.datetime.now() - startTime)))
print('-----Publishing Section Complete-----') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['config']] # depends on [control=['if'], data=['config']]
else:
print('Config %s not found' % configFile) # depends on [control=['for'], data=['configFile']]
if combinedApp:
if os.path.exists(combinedApp):
print(' ')
startSectTime = datetime.datetime.now()
print('Creating combined result: %s' % str(startSectTime.strftime(dateTimeFormat)))
config = common.init_config_json(config_file=combinedApp)
combinedResults = publishTools.publishCombinedWebMap(maps_info=config['PublishingDetails']['MapDetails'], webmaps=webmaps)
if 'PublishingDetails' in config:
publishingConfig = config['PublishingDetails']
if 'PublishData' in publishingConfig:
publishData = publishingConfig['PublishData'] # depends on [control=['if'], data=['publishingConfig']]
else:
print('PublishingDetails is missing the PublishData parameter: type string, values, True or False')
publishData = 'TRUE'
if str(publishData).upper() == 'TRUE' or str(publishData).upper() == 'YES':
if 'AppDetails' in publishingConfig:
resultApps = publishTools.publishApp(app_info=publishingConfig['AppDetails'], map_info=combinedResults) # depends on [control=['if'], data=['publishingConfig']]
print('Creating combind result completed, time to complete: %s' % str(datetime.datetime.now() - startSectTime)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['config']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except (TypeError, ValueError, AttributeError) as e:
print(e) # depends on [control=['except'], data=['e']]
except common.ArcRestHelperError as e:
print('error in function: %s' % e[0]['function'])
print('error on line: %s' % e[0]['line'])
print('error in file name: %s' % e[0]['filename'])
print('with error message: %s' % e[0]['synerror'])
if 'arcpyError' in e[0]:
print('with arcpy message: %s' % e[0]['arcpyError']) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']]
except Exception as e:
if reportToolsInstalled:
if isinstance(e, (ReportTools.ReportToolsError, DataPrep.DataPrepError)):
print('error in function: %s' % e[0]['function'])
print('error on line: %s' % e[0]['line'])
print('error in file name: %s' % e[0]['filename'])
print('with error message: %s' % e[0]['synerror'])
if 'arcpyError' in e[0]:
print('with arcpy message: %s' % e[0]['arcpyError']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
(line, filename, synerror) = trace()
print('error on line: %s' % line)
print('error in file name: %s' % filename)
print('with error message: %s' % synerror) # depends on [control=['if'], data=[]]
else:
(line, filename, synerror) = trace()
print('error on line: %s' % line)
print('error in file name: %s' % filename)
print('with error message: %s' % synerror) # depends on [control=['except'], data=['e']]
finally:
print('Script complete, time to complete: %s' % str(datetime.datetime.now() - scriptStartTime))
print('###############Script Completed#################')
print('')
if publishTools is not None:
publishTools.dispose() # depends on [control=['if'], data=['publishTools']]
publishTools = None
webmaps = None
config = None
resultFS = None
resultsItems = None
resultMaps = None
resultApps = None
combinedResults = None
del publishTools
del webmaps
del config
del resultFS
del resultMaps
del resultApps
del combinedResults
del resultsItems
gc.collect() |
def printed_out(self, name):
"""
Create a string representation of the action
"""
opt = self.variables().optional_namestring()
req = self.variables().required_namestring()
out = ''
out += '| |\n'
out += '| |---{}({}{})\n'.format(name, req, opt)
if self.description:
out += '| | {}\n'.format(self.description)
return out | def function[printed_out, parameter[self, name]]:
constant[
Create a string representation of the action
]
variable[opt] assign[=] call[call[name[self].variables, parameter[]].optional_namestring, parameter[]]
variable[req] assign[=] call[call[name[self].variables, parameter[]].required_namestring, parameter[]]
variable[out] assign[=] constant[]
<ast.AugAssign object at 0x7da20c6c7820>
<ast.AugAssign object at 0x7da20c6c7a30>
if name[self].description begin[:]
<ast.AugAssign object at 0x7da20c6c5600>
return[name[out]] | keyword[def] identifier[printed_out] ( identifier[self] , identifier[name] ):
literal[string]
identifier[opt] = identifier[self] . identifier[variables] (). identifier[optional_namestring] ()
identifier[req] = identifier[self] . identifier[variables] (). identifier[required_namestring] ()
identifier[out] = literal[string]
identifier[out] += literal[string]
identifier[out] += literal[string] . identifier[format] ( identifier[name] , identifier[req] , identifier[opt] )
keyword[if] identifier[self] . identifier[description] :
identifier[out] += literal[string] . identifier[format] ( identifier[self] . identifier[description] )
keyword[return] identifier[out] | def printed_out(self, name):
"""
Create a string representation of the action
"""
opt = self.variables().optional_namestring()
req = self.variables().required_namestring()
out = ''
out += '| |\n'
out += '| |---{}({}{})\n'.format(name, req, opt)
if self.description:
out += '| | {}\n'.format(self.description) # depends on [control=['if'], data=[]]
return out |
def igphyml(input_file=None, tree_file=None, root=None, verbose=False):
'''
Computes a phylogenetic tree using IgPhyML.
.. note::
IgPhyML must be installed. It can be downloaded from https://github.com/kbhoehn/IgPhyML.
Args:
input_file (str): Path to a Phylip-formatted multiple sequence alignment. Required.
tree_file (str): Path to the output tree file.
root (str): Name of the root sequence. Required.
verbose (bool): If `True`, prints the standard output and standard error for each IgPhyML run.
Default is `False`.
'''
if shutil.which('igphyml') is None:
raise RuntimeError('It appears that IgPhyML is not installed.\nPlease install and try again.')
# first, tree topology is estimated with the M0/GY94 model
igphyml_cmd1 = 'igphyml -i {} -m GY -w M0 -t e --run_id gy94'.format(aln_file)
p1 = sp.Popen(igphyml_cmd1, stdout=sp.PIPE, stderr=sp.PIPE)
stdout1, stderr1 = p1.communicate()
if verbose:
print(stdout1 + '\n')
print(stderr1 + '\n\n')
intermediate = input_file + '_igphyml_tree.txt_gy94'
# now we fit the HLP17 model once the tree topology is fixed
igphyml_cmd2 = 'igphyml -i {0} -m HLP17 --root {1} -o lr -u {}_igphyml_tree.txt_gy94 -o {}'.format(input_file,
root,
tree_file)
p2 = sp.Popen(igphyml_cmd2, stdout=sp.PIPE, stderr=sp.PIPE)
stdout2, stderr2 = p2.communicate()
if verbose:
print(stdout2 + '\n')
print(stderr2 + '\n')
return tree_file + '_igphyml_tree.txt' | def function[igphyml, parameter[input_file, tree_file, root, verbose]]:
constant[
Computes a phylogenetic tree using IgPhyML.
.. note::
IgPhyML must be installed. It can be downloaded from https://github.com/kbhoehn/IgPhyML.
Args:
input_file (str): Path to a Phylip-formatted multiple sequence alignment. Required.
tree_file (str): Path to the output tree file.
root (str): Name of the root sequence. Required.
verbose (bool): If `True`, prints the standard output and standard error for each IgPhyML run.
Default is `False`.
]
if compare[call[name[shutil].which, parameter[constant[igphyml]]] is constant[None]] begin[:]
<ast.Raise object at 0x7da18f722a40>
variable[igphyml_cmd1] assign[=] call[constant[igphyml -i {} -m GY -w M0 -t e --run_id gy94].format, parameter[name[aln_file]]]
variable[p1] assign[=] call[name[sp].Popen, parameter[name[igphyml_cmd1]]]
<ast.Tuple object at 0x7da18f723a00> assign[=] call[name[p1].communicate, parameter[]]
if name[verbose] begin[:]
call[name[print], parameter[binary_operation[name[stdout1] + constant[
]]]]
call[name[print], parameter[binary_operation[name[stderr1] + constant[
]]]]
variable[intermediate] assign[=] binary_operation[name[input_file] + constant[_igphyml_tree.txt_gy94]]
variable[igphyml_cmd2] assign[=] call[constant[igphyml -i {0} -m HLP17 --root {1} -o lr -u {}_igphyml_tree.txt_gy94 -o {}].format, parameter[name[input_file], name[root], name[tree_file]]]
variable[p2] assign[=] call[name[sp].Popen, parameter[name[igphyml_cmd2]]]
<ast.Tuple object at 0x7da207f992d0> assign[=] call[name[p2].communicate, parameter[]]
if name[verbose] begin[:]
call[name[print], parameter[binary_operation[name[stdout2] + constant[
]]]]
call[name[print], parameter[binary_operation[name[stderr2] + constant[
]]]]
return[binary_operation[name[tree_file] + constant[_igphyml_tree.txt]]] | keyword[def] identifier[igphyml] ( identifier[input_file] = keyword[None] , identifier[tree_file] = keyword[None] , identifier[root] = keyword[None] , identifier[verbose] = keyword[False] ):
literal[string]
keyword[if] identifier[shutil] . identifier[which] ( literal[string] ) keyword[is] keyword[None] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[igphyml_cmd1] = literal[string] . identifier[format] ( identifier[aln_file] )
identifier[p1] = identifier[sp] . identifier[Popen] ( identifier[igphyml_cmd1] , identifier[stdout] = identifier[sp] . identifier[PIPE] , identifier[stderr] = identifier[sp] . identifier[PIPE] )
identifier[stdout1] , identifier[stderr1] = identifier[p1] . identifier[communicate] ()
keyword[if] identifier[verbose] :
identifier[print] ( identifier[stdout1] + literal[string] )
identifier[print] ( identifier[stderr1] + literal[string] )
identifier[intermediate] = identifier[input_file] + literal[string]
identifier[igphyml_cmd2] = literal[string] . identifier[format] ( identifier[input_file] ,
identifier[root] ,
identifier[tree_file] )
identifier[p2] = identifier[sp] . identifier[Popen] ( identifier[igphyml_cmd2] , identifier[stdout] = identifier[sp] . identifier[PIPE] , identifier[stderr] = identifier[sp] . identifier[PIPE] )
identifier[stdout2] , identifier[stderr2] = identifier[p2] . identifier[communicate] ()
keyword[if] identifier[verbose] :
identifier[print] ( identifier[stdout2] + literal[string] )
identifier[print] ( identifier[stderr2] + literal[string] )
keyword[return] identifier[tree_file] + literal[string] | def igphyml(input_file=None, tree_file=None, root=None, verbose=False):
"""
Computes a phylogenetic tree using IgPhyML.
.. note::
IgPhyML must be installed. It can be downloaded from https://github.com/kbhoehn/IgPhyML.
Args:
input_file (str): Path to a Phylip-formatted multiple sequence alignment. Required.
tree_file (str): Path to the output tree file.
root (str): Name of the root sequence. Required.
verbose (bool): If `True`, prints the standard output and standard error for each IgPhyML run.
Default is `False`.
"""
if shutil.which('igphyml') is None:
raise RuntimeError('It appears that IgPhyML is not installed.\nPlease install and try again.') # depends on [control=['if'], data=[]]
# first, tree topology is estimated with the M0/GY94 model
igphyml_cmd1 = 'igphyml -i {} -m GY -w M0 -t e --run_id gy94'.format(aln_file)
p1 = sp.Popen(igphyml_cmd1, stdout=sp.PIPE, stderr=sp.PIPE)
(stdout1, stderr1) = p1.communicate()
if verbose:
print(stdout1 + '\n')
print(stderr1 + '\n\n') # depends on [control=['if'], data=[]]
intermediate = input_file + '_igphyml_tree.txt_gy94'
# now we fit the HLP17 model once the tree topology is fixed
igphyml_cmd2 = 'igphyml -i {0} -m HLP17 --root {1} -o lr -u {}_igphyml_tree.txt_gy94 -o {}'.format(input_file, root, tree_file)
p2 = sp.Popen(igphyml_cmd2, stdout=sp.PIPE, stderr=sp.PIPE)
(stdout2, stderr2) = p2.communicate()
if verbose:
print(stdout2 + '\n')
print(stderr2 + '\n') # depends on [control=['if'], data=[]]
return tree_file + '_igphyml_tree.txt' |
def searchForThreads(self, name, limit=10):
"""
Find and get a thread by its name
:param name: Name of the thread
:param limit: The max. amount of groups to fetch
:return: :class:`models.User`, :class:`models.Group` and :class:`models.Page` objects, ordered by relevance
:rtype: list
:raises: FBchatException if request failed
"""
params = {"search": name, "limit": limit}
j = self.graphql_request(GraphQL(query=GraphQL.SEARCH_THREAD, params=params))
rtn = []
for node in j[name]["threads"]["nodes"]:
if node["__typename"] == "User":
rtn.append(User._from_graphql(node))
elif node["__typename"] == "MessageThread":
# MessageThread => Group thread
rtn.append(Group._from_graphql(node))
elif node["__typename"] == "Page":
rtn.append(Page._from_graphql(node))
elif node["__typename"] == "Group":
# We don't handle Facebook "Groups"
pass
else:
log.warning(
"Unknown type {} in {}".format(repr(node["__typename"]), node)
)
return rtn | def function[searchForThreads, parameter[self, name, limit]]:
constant[
Find and get a thread by its name
:param name: Name of the thread
:param limit: The max. amount of groups to fetch
:return: :class:`models.User`, :class:`models.Group` and :class:`models.Page` objects, ordered by relevance
:rtype: list
:raises: FBchatException if request failed
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b19cc1c0>, <ast.Constant object at 0x7da1b19cc820>], [<ast.Name object at 0x7da1b19cce80>, <ast.Name object at 0x7da1b19cd6c0>]]
variable[j] assign[=] call[name[self].graphql_request, parameter[call[name[GraphQL], parameter[]]]]
variable[rtn] assign[=] list[[]]
for taget[name[node]] in starred[call[call[call[name[j]][name[name]]][constant[threads]]][constant[nodes]]] begin[:]
if compare[call[name[node]][constant[__typename]] equal[==] constant[User]] begin[:]
call[name[rtn].append, parameter[call[name[User]._from_graphql, parameter[name[node]]]]]
return[name[rtn]] | keyword[def] identifier[searchForThreads] ( identifier[self] , identifier[name] , identifier[limit] = literal[int] ):
literal[string]
identifier[params] ={ literal[string] : identifier[name] , literal[string] : identifier[limit] }
identifier[j] = identifier[self] . identifier[graphql_request] ( identifier[GraphQL] ( identifier[query] = identifier[GraphQL] . identifier[SEARCH_THREAD] , identifier[params] = identifier[params] ))
identifier[rtn] =[]
keyword[for] identifier[node] keyword[in] identifier[j] [ identifier[name] ][ literal[string] ][ literal[string] ]:
keyword[if] identifier[node] [ literal[string] ]== literal[string] :
identifier[rtn] . identifier[append] ( identifier[User] . identifier[_from_graphql] ( identifier[node] ))
keyword[elif] identifier[node] [ literal[string] ]== literal[string] :
identifier[rtn] . identifier[append] ( identifier[Group] . identifier[_from_graphql] ( identifier[node] ))
keyword[elif] identifier[node] [ literal[string] ]== literal[string] :
identifier[rtn] . identifier[append] ( identifier[Page] . identifier[_from_graphql] ( identifier[node] ))
keyword[elif] identifier[node] [ literal[string] ]== literal[string] :
keyword[pass]
keyword[else] :
identifier[log] . identifier[warning] (
literal[string] . identifier[format] ( identifier[repr] ( identifier[node] [ literal[string] ]), identifier[node] )
)
keyword[return] identifier[rtn] | def searchForThreads(self, name, limit=10):
"""
Find and get a thread by its name
:param name: Name of the thread
:param limit: The max. amount of groups to fetch
:return: :class:`models.User`, :class:`models.Group` and :class:`models.Page` objects, ordered by relevance
:rtype: list
:raises: FBchatException if request failed
"""
params = {'search': name, 'limit': limit}
j = self.graphql_request(GraphQL(query=GraphQL.SEARCH_THREAD, params=params))
rtn = []
for node in j[name]['threads']['nodes']:
if node['__typename'] == 'User':
rtn.append(User._from_graphql(node)) # depends on [control=['if'], data=[]]
elif node['__typename'] == 'MessageThread':
# MessageThread => Group thread
rtn.append(Group._from_graphql(node)) # depends on [control=['if'], data=[]]
elif node['__typename'] == 'Page':
rtn.append(Page._from_graphql(node)) # depends on [control=['if'], data=[]]
elif node['__typename'] == 'Group':
# We don't handle Facebook "Groups"
pass # depends on [control=['if'], data=[]]
else:
log.warning('Unknown type {} in {}'.format(repr(node['__typename']), node)) # depends on [control=['for'], data=['node']]
return rtn |
def connect(self):
"""
Starts up an authentication session for the client using cookie
authentication if necessary.
"""
if self.r_session:
self.session_logout()
if self.admin_party:
self._use_iam = False
self.r_session = ClientSession(
timeout=self._timeout
)
elif self._use_basic_auth:
self._use_iam = False
self.r_session = BasicSession(
self._user,
self._auth_token,
self.server_url,
timeout=self._timeout
)
elif self._use_iam:
self.r_session = IAMSession(
self._auth_token,
self.server_url,
auto_renew=self._auto_renew,
client_id=self._iam_client_id,
client_secret=self._iam_client_secret,
timeout=self._timeout
)
else:
self.r_session = CookieSession(
self._user,
self._auth_token,
self.server_url,
auto_renew=self._auto_renew,
timeout=self._timeout
)
# If a Transport Adapter was supplied add it to the session
if self.adapter is not None:
self.r_session.mount(self.server_url, self.adapter)
if self._client_user_header is not None:
self.r_session.headers.update(self._client_user_header)
self.session_login()
# Utilize an event hook to append to the response message
# using :func:`~cloudant.common_util.append_response_error_content`
self.r_session.hooks['response'].append(append_response_error_content) | def function[connect, parameter[self]]:
constant[
Starts up an authentication session for the client using cookie
authentication if necessary.
]
if name[self].r_session begin[:]
call[name[self].session_logout, parameter[]]
if name[self].admin_party begin[:]
name[self]._use_iam assign[=] constant[False]
name[self].r_session assign[=] call[name[ClientSession], parameter[]]
if compare[name[self].adapter is_not constant[None]] begin[:]
call[name[self].r_session.mount, parameter[name[self].server_url, name[self].adapter]]
if compare[name[self]._client_user_header is_not constant[None]] begin[:]
call[name[self].r_session.headers.update, parameter[name[self]._client_user_header]]
call[name[self].session_login, parameter[]]
call[call[name[self].r_session.hooks][constant[response]].append, parameter[name[append_response_error_content]]] | keyword[def] identifier[connect] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[r_session] :
identifier[self] . identifier[session_logout] ()
keyword[if] identifier[self] . identifier[admin_party] :
identifier[self] . identifier[_use_iam] = keyword[False]
identifier[self] . identifier[r_session] = identifier[ClientSession] (
identifier[timeout] = identifier[self] . identifier[_timeout]
)
keyword[elif] identifier[self] . identifier[_use_basic_auth] :
identifier[self] . identifier[_use_iam] = keyword[False]
identifier[self] . identifier[r_session] = identifier[BasicSession] (
identifier[self] . identifier[_user] ,
identifier[self] . identifier[_auth_token] ,
identifier[self] . identifier[server_url] ,
identifier[timeout] = identifier[self] . identifier[_timeout]
)
keyword[elif] identifier[self] . identifier[_use_iam] :
identifier[self] . identifier[r_session] = identifier[IAMSession] (
identifier[self] . identifier[_auth_token] ,
identifier[self] . identifier[server_url] ,
identifier[auto_renew] = identifier[self] . identifier[_auto_renew] ,
identifier[client_id] = identifier[self] . identifier[_iam_client_id] ,
identifier[client_secret] = identifier[self] . identifier[_iam_client_secret] ,
identifier[timeout] = identifier[self] . identifier[_timeout]
)
keyword[else] :
identifier[self] . identifier[r_session] = identifier[CookieSession] (
identifier[self] . identifier[_user] ,
identifier[self] . identifier[_auth_token] ,
identifier[self] . identifier[server_url] ,
identifier[auto_renew] = identifier[self] . identifier[_auto_renew] ,
identifier[timeout] = identifier[self] . identifier[_timeout]
)
keyword[if] identifier[self] . identifier[adapter] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[r_session] . identifier[mount] ( identifier[self] . identifier[server_url] , identifier[self] . identifier[adapter] )
keyword[if] identifier[self] . identifier[_client_user_header] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[r_session] . identifier[headers] . identifier[update] ( identifier[self] . identifier[_client_user_header] )
identifier[self] . identifier[session_login] ()
identifier[self] . identifier[r_session] . identifier[hooks] [ literal[string] ]. identifier[append] ( identifier[append_response_error_content] ) | def connect(self):
"""
Starts up an authentication session for the client using cookie
authentication if necessary.
"""
if self.r_session:
self.session_logout() # depends on [control=['if'], data=[]]
if self.admin_party:
self._use_iam = False
self.r_session = ClientSession(timeout=self._timeout) # depends on [control=['if'], data=[]]
elif self._use_basic_auth:
self._use_iam = False
self.r_session = BasicSession(self._user, self._auth_token, self.server_url, timeout=self._timeout) # depends on [control=['if'], data=[]]
elif self._use_iam:
self.r_session = IAMSession(self._auth_token, self.server_url, auto_renew=self._auto_renew, client_id=self._iam_client_id, client_secret=self._iam_client_secret, timeout=self._timeout) # depends on [control=['if'], data=[]]
else:
self.r_session = CookieSession(self._user, self._auth_token, self.server_url, auto_renew=self._auto_renew, timeout=self._timeout)
# If a Transport Adapter was supplied add it to the session
if self.adapter is not None:
self.r_session.mount(self.server_url, self.adapter) # depends on [control=['if'], data=[]]
if self._client_user_header is not None:
self.r_session.headers.update(self._client_user_header) # depends on [control=['if'], data=[]]
self.session_login()
# Utilize an event hook to append to the response message
# using :func:`~cloudant.common_util.append_response_error_content`
self.r_session.hooks['response'].append(append_response_error_content) |
def add_feature(self, obj=None, geometry=None, properties=None):
"""
Adds a given feature. If obj isn't specified, geometry and properties can be set as arguments directly.
Parameters:
- **obj**: Another feature instance, an object with the \_\_geo_interface__ or a geojson dictionary of the Feature type.
- **geometry** (optional): Anything that the Geometry instance can accept.
- **properties** (optional): A dictionary of key-value property pairs.
"""
properties = properties or {}
if isinstance(obj, Feature):
# instead of creating copy, the original feat should reference the same one that was added here
feat = obj._data
elif isinstance(obj, dict):
feat = obj.copy()
else:
feat = Feature(geometry=geometry, properties=properties).__geo_interface__
self._data["features"].append(feat) | def function[add_feature, parameter[self, obj, geometry, properties]]:
constant[
Adds a given feature. If obj isn't specified, geometry and properties can be set as arguments directly.
Parameters:
- **obj**: Another feature instance, an object with the \_\_geo_interface__ or a geojson dictionary of the Feature type.
- **geometry** (optional): Anything that the Geometry instance can accept.
- **properties** (optional): A dictionary of key-value property pairs.
]
variable[properties] assign[=] <ast.BoolOp object at 0x7da2041db820>
if call[name[isinstance], parameter[name[obj], name[Feature]]] begin[:]
variable[feat] assign[=] name[obj]._data
call[call[name[self]._data][constant[features]].append, parameter[name[feat]]] | keyword[def] identifier[add_feature] ( identifier[self] , identifier[obj] = keyword[None] , identifier[geometry] = keyword[None] , identifier[properties] = keyword[None] ):
literal[string]
identifier[properties] = identifier[properties] keyword[or] {}
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Feature] ):
identifier[feat] = identifier[obj] . identifier[_data]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[dict] ):
identifier[feat] = identifier[obj] . identifier[copy] ()
keyword[else] :
identifier[feat] = identifier[Feature] ( identifier[geometry] = identifier[geometry] , identifier[properties] = identifier[properties] ). identifier[__geo_interface__]
identifier[self] . identifier[_data] [ literal[string] ]. identifier[append] ( identifier[feat] ) | def add_feature(self, obj=None, geometry=None, properties=None):
"""
Adds a given feature. If obj isn't specified, geometry and properties can be set as arguments directly.
Parameters:
- **obj**: Another feature instance, an object with the \\_\\_geo_interface__ or a geojson dictionary of the Feature type.
- **geometry** (optional): Anything that the Geometry instance can accept.
- **properties** (optional): A dictionary of key-value property pairs.
"""
properties = properties or {}
if isinstance(obj, Feature):
# instead of creating copy, the original feat should reference the same one that was added here
feat = obj._data # depends on [control=['if'], data=[]]
elif isinstance(obj, dict):
feat = obj.copy() # depends on [control=['if'], data=[]]
else:
feat = Feature(geometry=geometry, properties=properties).__geo_interface__
self._data['features'].append(feat) |
def _create_bundle(self, data):
"""Return a bundle initialised by the given dict."""
kwargs = {}
filters = None
if isinstance(data, dict):
kwargs.update(
filters=data.get('filters', None),
output=data.get('output', None),
debug=data.get('debug', None),
extra=data.get('extra', {}),
config=data.get('config', {}),
depends=data.get('depends', None))
bundle = Bundle(*list(self._yield_bundle_contents(data)), **kwargs)
return self._auto_filter_bundle(bundle) | def function[_create_bundle, parameter[self, data]]:
constant[Return a bundle initialised by the given dict.]
variable[kwargs] assign[=] dictionary[[], []]
variable[filters] assign[=] constant[None]
if call[name[isinstance], parameter[name[data], name[dict]]] begin[:]
call[name[kwargs].update, parameter[]]
variable[bundle] assign[=] call[name[Bundle], parameter[<ast.Starred object at 0x7da20e955330>]]
return[call[name[self]._auto_filter_bundle, parameter[name[bundle]]]] | keyword[def] identifier[_create_bundle] ( identifier[self] , identifier[data] ):
literal[string]
identifier[kwargs] ={}
identifier[filters] = keyword[None]
keyword[if] identifier[isinstance] ( identifier[data] , identifier[dict] ):
identifier[kwargs] . identifier[update] (
identifier[filters] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[output] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[debug] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ),
identifier[extra] = identifier[data] . identifier[get] ( literal[string] ,{}),
identifier[config] = identifier[data] . identifier[get] ( literal[string] ,{}),
identifier[depends] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ))
identifier[bundle] = identifier[Bundle] (* identifier[list] ( identifier[self] . identifier[_yield_bundle_contents] ( identifier[data] )),** identifier[kwargs] )
keyword[return] identifier[self] . identifier[_auto_filter_bundle] ( identifier[bundle] ) | def _create_bundle(self, data):
"""Return a bundle initialised by the given dict."""
kwargs = {}
filters = None
if isinstance(data, dict):
kwargs.update(filters=data.get('filters', None), output=data.get('output', None), debug=data.get('debug', None), extra=data.get('extra', {}), config=data.get('config', {}), depends=data.get('depends', None)) # depends on [control=['if'], data=[]]
bundle = Bundle(*list(self._yield_bundle_contents(data)), **kwargs)
return self._auto_filter_bundle(bundle) |
def _set_ipv6_address(self, v, load=False):
"""
Setter method for ipv6_address, mapped from YANG variable /interface/fortygigabitethernet/ipv6/ipv6_config/address/ipv6_address (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ipv6_address is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ipv6_address() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("address",ipv6_address.ipv6_address, yang_name="ipv6-address", rest_name="ipv6-address", parent=self, is_container='list', user_ordered=True, path_helper=self._path_helper, yang_keys='address', extensions={u'tailf-common': {u'info': u'Set the IP address of an interface', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-compact-syntax': None, u'cli-drop-node-name': None, u'cli-no-match-completion': None, u'callpoint': u'phy-intf-ipv6-addr-cp'}}), is_container='list', yang_name="ipv6-address", rest_name="ipv6-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the IP address of an interface', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-compact-syntax': None, u'cli-drop-node-name': None, u'cli-no-match-completion': None, u'callpoint': u'phy-intf-ipv6-addr-cp'}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-config', defining_module='brocade-ipv6-config', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ipv6_address must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("address",ipv6_address.ipv6_address, yang_name="ipv6-address", rest_name="ipv6-address", parent=self, is_container='list', user_ordered=True, path_helper=self._path_helper, yang_keys='address', extensions={u'tailf-common': {u'info': u'Set the IP address of an interface', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-compact-syntax': None, u'cli-drop-node-name': None, u'cli-no-match-completion': None, u'callpoint': u'phy-intf-ipv6-addr-cp'}}), is_container='list', yang_name="ipv6-address", rest_name="ipv6-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the IP address of an interface', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-compact-syntax': None, u'cli-drop-node-name': None, u'cli-no-match-completion': None, u'callpoint': u'phy-intf-ipv6-addr-cp'}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-config', defining_module='brocade-ipv6-config', yang_type='list', is_config=True)""",
})
self.__ipv6_address = t
if hasattr(self, '_set'):
self._set() | def function[_set_ipv6_address, parameter[self, v, load]]:
constant[
Setter method for ipv6_address, mapped from YANG variable /interface/fortygigabitethernet/ipv6/ipv6_config/address/ipv6_address (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ipv6_address is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ipv6_address() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da20c7cb1c0>
name[self].__ipv6_address assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_ipv6_address] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[ipv6_address] . identifier[ipv6_address] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[True] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__ipv6_address] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_ipv6_address(self, v, load=False):
"""
Setter method for ipv6_address, mapped from YANG variable /interface/fortygigabitethernet/ipv6/ipv6_config/address/ipv6_address (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ipv6_address is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ipv6_address() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=YANGListType('address', ipv6_address.ipv6_address, yang_name='ipv6-address', rest_name='ipv6-address', parent=self, is_container='list', user_ordered=True, path_helper=self._path_helper, yang_keys='address', extensions={u'tailf-common': {u'info': u'Set the IP address of an interface', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-compact-syntax': None, u'cli-drop-node-name': None, u'cli-no-match-completion': None, u'callpoint': u'phy-intf-ipv6-addr-cp'}}), is_container='list', yang_name='ipv6-address', rest_name='ipv6-address', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the IP address of an interface', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-compact-syntax': None, u'cli-drop-node-name': None, u'cli-no-match-completion': None, u'callpoint': u'phy-intf-ipv6-addr-cp'}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-config', defining_module='brocade-ipv6-config', yang_type='list', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'ipv6_address must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("address",ipv6_address.ipv6_address, yang_name="ipv6-address", rest_name="ipv6-address", parent=self, is_container=\'list\', user_ordered=True, path_helper=self._path_helper, yang_keys=\'address\', extensions={u\'tailf-common\': {u\'info\': u\'Set the IP address of an interface\', u\'cli-no-key-completion\': None, u\'cli-suppress-mode\': None, u\'cli-compact-syntax\': None, u\'cli-drop-node-name\': None, u\'cli-no-match-completion\': None, u\'callpoint\': u\'phy-intf-ipv6-addr-cp\'}}), is_container=\'list\', yang_name="ipv6-address", rest_name="ipv6-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Set the IP address of an interface\', u\'cli-no-key-completion\': None, u\'cli-suppress-mode\': None, u\'cli-compact-syntax\': None, u\'cli-drop-node-name\': None, u\'cli-no-match-completion\': None, u\'callpoint\': u\'phy-intf-ipv6-addr-cp\'}}, namespace=\'urn:brocade.com:mgmt:brocade-ipv6-config\', defining_module=\'brocade-ipv6-config\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__ipv6_address = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def edge_average(a):
"Return the mean value around the edge of an array."
if len(np.ravel(a)) < 2:
return float(a[0])
else:
top_edge = a[0]
bottom_edge = a[-1]
left_edge = a[1:-1,0]
right_edge = a[1:-1,-1]
edge_sum = np.sum(top_edge) + np.sum(bottom_edge) + np.sum(left_edge) + np.sum(right_edge)
num_values = len(top_edge)+len(bottom_edge)+len(left_edge)+len(right_edge)
return float(edge_sum)/num_values | def function[edge_average, parameter[a]]:
constant[Return the mean value around the edge of an array.]
if compare[call[name[len], parameter[call[name[np].ravel, parameter[name[a]]]]] less[<] constant[2]] begin[:]
return[call[name[float], parameter[call[name[a]][constant[0]]]]] | keyword[def] identifier[edge_average] ( identifier[a] ):
literal[string]
keyword[if] identifier[len] ( identifier[np] . identifier[ravel] ( identifier[a] ))< literal[int] :
keyword[return] identifier[float] ( identifier[a] [ literal[int] ])
keyword[else] :
identifier[top_edge] = identifier[a] [ literal[int] ]
identifier[bottom_edge] = identifier[a] [- literal[int] ]
identifier[left_edge] = identifier[a] [ literal[int] :- literal[int] , literal[int] ]
identifier[right_edge] = identifier[a] [ literal[int] :- literal[int] ,- literal[int] ]
identifier[edge_sum] = identifier[np] . identifier[sum] ( identifier[top_edge] )+ identifier[np] . identifier[sum] ( identifier[bottom_edge] )+ identifier[np] . identifier[sum] ( identifier[left_edge] )+ identifier[np] . identifier[sum] ( identifier[right_edge] )
identifier[num_values] = identifier[len] ( identifier[top_edge] )+ identifier[len] ( identifier[bottom_edge] )+ identifier[len] ( identifier[left_edge] )+ identifier[len] ( identifier[right_edge] )
keyword[return] identifier[float] ( identifier[edge_sum] )/ identifier[num_values] | def edge_average(a):
"""Return the mean value around the edge of an array."""
if len(np.ravel(a)) < 2:
return float(a[0]) # depends on [control=['if'], data=[]]
else:
top_edge = a[0]
bottom_edge = a[-1]
left_edge = a[1:-1, 0]
right_edge = a[1:-1, -1]
edge_sum = np.sum(top_edge) + np.sum(bottom_edge) + np.sum(left_edge) + np.sum(right_edge)
num_values = len(top_edge) + len(bottom_edge) + len(left_edge) + len(right_edge)
return float(edge_sum) / num_values |
def _central_slopes_directions(self, data, dX, dY):
"""
Calculates magnitude/direction of slopes using central difference
"""
shp = np.array(data.shape) - 1
direction = np.full(data.shape, FLAT_ID_INT, 'float64')
mag = np.full(direction, FLAT_ID_INT, 'float64')
ind = 0
d1, d2, theta = _get_d1_d2(dX, dY, ind, [0, 1], [1, 1], shp)
s2 = (data[0:-2, 1:-1] - data[2:, 1:-1]) / d2
s1 = -(data[1:-1, 0:-2] - data[1:-1, 2:]) / d1
direction[1:-1, 1:-1] = np.arctan2(s2, s1) + np.pi
mag = np.sqrt(s1**2 + s2**2)
return mag, direction | def function[_central_slopes_directions, parameter[self, data, dX, dY]]:
constant[
Calculates magnitude/direction of slopes using central difference
]
variable[shp] assign[=] binary_operation[call[name[np].array, parameter[name[data].shape]] - constant[1]]
variable[direction] assign[=] call[name[np].full, parameter[name[data].shape, name[FLAT_ID_INT], constant[float64]]]
variable[mag] assign[=] call[name[np].full, parameter[name[direction], name[FLAT_ID_INT], constant[float64]]]
variable[ind] assign[=] constant[0]
<ast.Tuple object at 0x7da207f00f70> assign[=] call[name[_get_d1_d2], parameter[name[dX], name[dY], name[ind], list[[<ast.Constant object at 0x7da207f03ee0>, <ast.Constant object at 0x7da207f03fa0>]], list[[<ast.Constant object at 0x7da207f03070>, <ast.Constant object at 0x7da207f036a0>]], name[shp]]]
variable[s2] assign[=] binary_operation[binary_operation[call[name[data]][tuple[[<ast.Slice object at 0x7da207f02f20>, <ast.Slice object at 0x7da207f007c0>]]] - call[name[data]][tuple[[<ast.Slice object at 0x7da207f03670>, <ast.Slice object at 0x7da207f03c10>]]]] / name[d2]]
variable[s1] assign[=] binary_operation[<ast.UnaryOp object at 0x7da207f02920> / name[d1]]
call[name[direction]][tuple[[<ast.Slice object at 0x7da207f01120>, <ast.Slice object at 0x7da207f02a10>]]] assign[=] binary_operation[call[name[np].arctan2, parameter[name[s2], name[s1]]] + name[np].pi]
variable[mag] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[name[s1] ** constant[2]] + binary_operation[name[s2] ** constant[2]]]]]
return[tuple[[<ast.Name object at 0x7da207f01390>, <ast.Name object at 0x7da207f00ac0>]]] | keyword[def] identifier[_central_slopes_directions] ( identifier[self] , identifier[data] , identifier[dX] , identifier[dY] ):
literal[string]
identifier[shp] = identifier[np] . identifier[array] ( identifier[data] . identifier[shape] )- literal[int]
identifier[direction] = identifier[np] . identifier[full] ( identifier[data] . identifier[shape] , identifier[FLAT_ID_INT] , literal[string] )
identifier[mag] = identifier[np] . identifier[full] ( identifier[direction] , identifier[FLAT_ID_INT] , literal[string] )
identifier[ind] = literal[int]
identifier[d1] , identifier[d2] , identifier[theta] = identifier[_get_d1_d2] ( identifier[dX] , identifier[dY] , identifier[ind] ,[ literal[int] , literal[int] ],[ literal[int] , literal[int] ], identifier[shp] )
identifier[s2] =( identifier[data] [ literal[int] :- literal[int] , literal[int] :- literal[int] ]- identifier[data] [ literal[int] :, literal[int] :- literal[int] ])/ identifier[d2]
identifier[s1] =-( identifier[data] [ literal[int] :- literal[int] , literal[int] :- literal[int] ]- identifier[data] [ literal[int] :- literal[int] , literal[int] :])/ identifier[d1]
identifier[direction] [ literal[int] :- literal[int] , literal[int] :- literal[int] ]= identifier[np] . identifier[arctan2] ( identifier[s2] , identifier[s1] )+ identifier[np] . identifier[pi]
identifier[mag] = identifier[np] . identifier[sqrt] ( identifier[s1] ** literal[int] + identifier[s2] ** literal[int] )
keyword[return] identifier[mag] , identifier[direction] | def _central_slopes_directions(self, data, dX, dY):
"""
Calculates magnitude/direction of slopes using central difference
"""
shp = np.array(data.shape) - 1
direction = np.full(data.shape, FLAT_ID_INT, 'float64')
mag = np.full(direction, FLAT_ID_INT, 'float64')
ind = 0
(d1, d2, theta) = _get_d1_d2(dX, dY, ind, [0, 1], [1, 1], shp)
s2 = (data[0:-2, 1:-1] - data[2:, 1:-1]) / d2
s1 = -(data[1:-1, 0:-2] - data[1:-1, 2:]) / d1
direction[1:-1, 1:-1] = np.arctan2(s2, s1) + np.pi
mag = np.sqrt(s1 ** 2 + s2 ** 2)
return (mag, direction) |
def add_download_task(self, source_url, remote_path,
rate_limit=None, timeout=60 * 60,
expires=None, callback='', **kwargs):
"""添加离线下载任务,实现单个文件离线下载.
:param source_url: 源文件的URL。
:param remote_path: 下载后的文件保存路径。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:param rate_limit: 下载限速,默认不限速。
:type rate_limit: int or long
:param timeout: 下载超时时间,默认3600秒。
:param expires: 请求失效时间,如果有,则会校验。
:type expires: int
:param callback: 下载完毕后的回调,默认为空。
:type callback: str
:return: Response 对象
"""
data = {
'source_url': source_url,
'save_path': remote_path,
'expires': expires,
'rate_limit': rate_limit,
'timeout': timeout,
'callback': callback,
}
return self._request('services/cloud_dl', 'add_task',
data=data, **kwargs) | def function[add_download_task, parameter[self, source_url, remote_path, rate_limit, timeout, expires, callback]]:
constant[添加离线下载任务,实现单个文件离线下载.
:param source_url: 源文件的URL。
:param remote_path: 下载后的文件保存路径。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\r, \n, \t, 空格, \0, \x0B`` 。
:param rate_limit: 下载限速,默认不限速。
:type rate_limit: int or long
:param timeout: 下载超时时间,默认3600秒。
:param expires: 请求失效时间,如果有,则会校验。
:type expires: int
:param callback: 下载完毕后的回调,默认为空。
:type callback: str
:return: Response 对象
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b10356f0>, <ast.Constant object at 0x7da1b1037940>, <ast.Constant object at 0x7da1b10361d0>, <ast.Constant object at 0x7da1b1035330>, <ast.Constant object at 0x7da1b10368c0>, <ast.Constant object at 0x7da1b1037e50>], [<ast.Name object at 0x7da1b10376d0>, <ast.Name object at 0x7da1b1035450>, <ast.Name object at 0x7da1b1035b70>, <ast.Name object at 0x7da1b10367a0>, <ast.Name object at 0x7da1b1036c80>, <ast.Name object at 0x7da1b1034be0>]]
return[call[name[self]._request, parameter[constant[services/cloud_dl], constant[add_task]]]] | keyword[def] identifier[add_download_task] ( identifier[self] , identifier[source_url] , identifier[remote_path] ,
identifier[rate_limit] = keyword[None] , identifier[timeout] = literal[int] * literal[int] ,
identifier[expires] = keyword[None] , identifier[callback] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[data] ={
literal[string] : identifier[source_url] ,
literal[string] : identifier[remote_path] ,
literal[string] : identifier[expires] ,
literal[string] : identifier[rate_limit] ,
literal[string] : identifier[timeout] ,
literal[string] : identifier[callback] ,
}
keyword[return] identifier[self] . identifier[_request] ( literal[string] , literal[string] ,
identifier[data] = identifier[data] ,** identifier[kwargs] ) | def add_download_task(self, source_url, remote_path, rate_limit=None, timeout=60 * 60, expires=None, callback='', **kwargs):
"""添加离线下载任务,实现单个文件离线下载.
:param source_url: 源文件的URL。
:param remote_path: 下载后的文件保存路径。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:param rate_limit: 下载限速,默认不限速。
:type rate_limit: int or long
:param timeout: 下载超时时间,默认3600秒。
:param expires: 请求失效时间,如果有,则会校验。
:type expires: int
:param callback: 下载完毕后的回调,默认为空。
:type callback: str
:return: Response 对象
"""
data = {'source_url': source_url, 'save_path': remote_path, 'expires': expires, 'rate_limit': rate_limit, 'timeout': timeout, 'callback': callback}
return self._request('services/cloud_dl', 'add_task', data=data, **kwargs) |
def next_state(self):
"""This is a method that will be called when the time remaining ends.
The current state can be: roasting, cooling, idle, sleeping, connecting,
or unkown."""
self.active_recipe_item += 1
if self.active_recipe_item >= len(self.recipe):
# we're done!
return
# show state step on screen
print("--------------------------------------------")
print("Setting next process step: %d" % self.active_recipe_item)
print("time:%d, target: %ddegF, fan: %d, state: %s" %
(self.recipe[self.active_recipe_item]['time_remaining'],
self.recipe[self.active_recipe_item]['target_temp'],
self.recipe[self.active_recipe_item]['fan_speed'],
self.recipe[self.active_recipe_item]['state']
))
print("--------------------------------------------")
# set values for next state
self.roaster.time_remaining = (
self.recipe[self.active_recipe_item]['time_remaining'])
self.roaster.target_temp = (
self.recipe[self.active_recipe_item]['target_temp'])
self.roaster.fan_speed = (
self.recipe[self.active_recipe_item]['fan_speed'])
# set state
if(self.recipe[self.active_recipe_item]['state'] == 'roasting'):
self.roaster.roast()
elif(self.recipe[self.active_recipe_item]['state'] == 'cooling'):
self.roaster.cool()
elif(self.recipe[self.active_recipe_item]['state'] == 'idle'):
self.roaster.idle()
elif(self.recipe[self.active_recipe_item]['state'] == 'cooling'):
self.roaster.sleep() | def function[next_state, parameter[self]]:
constant[This is a method that will be called when the time remaining ends.
The current state can be: roasting, cooling, idle, sleeping, connecting,
or unkown.]
<ast.AugAssign object at 0x7da2041db250>
if compare[name[self].active_recipe_item greater_or_equal[>=] call[name[len], parameter[name[self].recipe]]] begin[:]
return[None]
call[name[print], parameter[constant[--------------------------------------------]]]
call[name[print], parameter[binary_operation[constant[Setting next process step: %d] <ast.Mod object at 0x7da2590d6920> name[self].active_recipe_item]]]
call[name[print], parameter[binary_operation[constant[time:%d, target: %ddegF, fan: %d, state: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da2041d9c30>, <ast.Subscript object at 0x7da2041da320>, <ast.Subscript object at 0x7da2041da920>, <ast.Subscript object at 0x7da2041d8bb0>]]]]]
call[name[print], parameter[constant[--------------------------------------------]]]
name[self].roaster.time_remaining assign[=] call[call[name[self].recipe][name[self].active_recipe_item]][constant[time_remaining]]
name[self].roaster.target_temp assign[=] call[call[name[self].recipe][name[self].active_recipe_item]][constant[target_temp]]
name[self].roaster.fan_speed assign[=] call[call[name[self].recipe][name[self].active_recipe_item]][constant[fan_speed]]
if compare[call[call[name[self].recipe][name[self].active_recipe_item]][constant[state]] equal[==] constant[roasting]] begin[:]
call[name[self].roaster.roast, parameter[]] | keyword[def] identifier[next_state] ( identifier[self] ):
literal[string]
identifier[self] . identifier[active_recipe_item] += literal[int]
keyword[if] identifier[self] . identifier[active_recipe_item] >= identifier[len] ( identifier[self] . identifier[recipe] ):
keyword[return]
identifier[print] ( literal[string] )
identifier[print] ( literal[string] % identifier[self] . identifier[active_recipe_item] )
identifier[print] ( literal[string] %
( identifier[self] . identifier[recipe] [ identifier[self] . identifier[active_recipe_item] ][ literal[string] ],
identifier[self] . identifier[recipe] [ identifier[self] . identifier[active_recipe_item] ][ literal[string] ],
identifier[self] . identifier[recipe] [ identifier[self] . identifier[active_recipe_item] ][ literal[string] ],
identifier[self] . identifier[recipe] [ identifier[self] . identifier[active_recipe_item] ][ literal[string] ]
))
identifier[print] ( literal[string] )
identifier[self] . identifier[roaster] . identifier[time_remaining] =(
identifier[self] . identifier[recipe] [ identifier[self] . identifier[active_recipe_item] ][ literal[string] ])
identifier[self] . identifier[roaster] . identifier[target_temp] =(
identifier[self] . identifier[recipe] [ identifier[self] . identifier[active_recipe_item] ][ literal[string] ])
identifier[self] . identifier[roaster] . identifier[fan_speed] =(
identifier[self] . identifier[recipe] [ identifier[self] . identifier[active_recipe_item] ][ literal[string] ])
keyword[if] ( identifier[self] . identifier[recipe] [ identifier[self] . identifier[active_recipe_item] ][ literal[string] ]== literal[string] ):
identifier[self] . identifier[roaster] . identifier[roast] ()
keyword[elif] ( identifier[self] . identifier[recipe] [ identifier[self] . identifier[active_recipe_item] ][ literal[string] ]== literal[string] ):
identifier[self] . identifier[roaster] . identifier[cool] ()
keyword[elif] ( identifier[self] . identifier[recipe] [ identifier[self] . identifier[active_recipe_item] ][ literal[string] ]== literal[string] ):
identifier[self] . identifier[roaster] . identifier[idle] ()
keyword[elif] ( identifier[self] . identifier[recipe] [ identifier[self] . identifier[active_recipe_item] ][ literal[string] ]== literal[string] ):
identifier[self] . identifier[roaster] . identifier[sleep] () | def next_state(self):
"""This is a method that will be called when the time remaining ends.
The current state can be: roasting, cooling, idle, sleeping, connecting,
or unkown."""
self.active_recipe_item += 1
if self.active_recipe_item >= len(self.recipe):
# we're done!
return # depends on [control=['if'], data=[]]
# show state step on screen
print('--------------------------------------------')
print('Setting next process step: %d' % self.active_recipe_item)
print('time:%d, target: %ddegF, fan: %d, state: %s' % (self.recipe[self.active_recipe_item]['time_remaining'], self.recipe[self.active_recipe_item]['target_temp'], self.recipe[self.active_recipe_item]['fan_speed'], self.recipe[self.active_recipe_item]['state']))
print('--------------------------------------------')
# set values for next state
self.roaster.time_remaining = self.recipe[self.active_recipe_item]['time_remaining']
self.roaster.target_temp = self.recipe[self.active_recipe_item]['target_temp']
self.roaster.fan_speed = self.recipe[self.active_recipe_item]['fan_speed']
# set state
if self.recipe[self.active_recipe_item]['state'] == 'roasting':
self.roaster.roast() # depends on [control=['if'], data=[]]
elif self.recipe[self.active_recipe_item]['state'] == 'cooling':
self.roaster.cool() # depends on [control=['if'], data=[]]
elif self.recipe[self.active_recipe_item]['state'] == 'idle':
self.roaster.idle() # depends on [control=['if'], data=[]]
elif self.recipe[self.active_recipe_item]['state'] == 'cooling':
self.roaster.sleep() # depends on [control=['if'], data=[]] |
def put(self, url, html, cache_info=None):
"""
Put response into cache
:param url: Url to cache
:type url: str | unicode
:param html: HTML content of url
:type html: str | unicode
:param cache_info: Cache Info (default: None)
:type cache_info: floscraper.models.CacheInfo
:rtype: None
"""
key = hashlib.md5(url).hexdigest()
try:
self._cache_set(key, html)
except:
self.exception("Failed to write cache")
return
self.update(url, cache_info) | def function[put, parameter[self, url, html, cache_info]]:
constant[
Put response into cache
:param url: Url to cache
:type url: str | unicode
:param html: HTML content of url
:type html: str | unicode
:param cache_info: Cache Info (default: None)
:type cache_info: floscraper.models.CacheInfo
:rtype: None
]
variable[key] assign[=] call[call[name[hashlib].md5, parameter[name[url]]].hexdigest, parameter[]]
<ast.Try object at 0x7da18bccb0d0>
call[name[self].update, parameter[name[url], name[cache_info]]] | keyword[def] identifier[put] ( identifier[self] , identifier[url] , identifier[html] , identifier[cache_info] = keyword[None] ):
literal[string]
identifier[key] = identifier[hashlib] . identifier[md5] ( identifier[url] ). identifier[hexdigest] ()
keyword[try] :
identifier[self] . identifier[_cache_set] ( identifier[key] , identifier[html] )
keyword[except] :
identifier[self] . identifier[exception] ( literal[string] )
keyword[return]
identifier[self] . identifier[update] ( identifier[url] , identifier[cache_info] ) | def put(self, url, html, cache_info=None):
"""
Put response into cache
:param url: Url to cache
:type url: str | unicode
:param html: HTML content of url
:type html: str | unicode
:param cache_info: Cache Info (default: None)
:type cache_info: floscraper.models.CacheInfo
:rtype: None
"""
key = hashlib.md5(url).hexdigest()
try:
self._cache_set(key, html) # depends on [control=['try'], data=[]]
except:
self.exception('Failed to write cache')
return # depends on [control=['except'], data=[]]
self.update(url, cache_info) |
def do_trace(self, arg):
"""
t - trace at the current assembly instruction
trace - trace at the current assembly instruction
"""
if arg: # XXX this check is to be removed
raise CmdError("too many arguments")
if self.lastEvent is None:
raise CmdError("no current thread set")
self.lastEvent.get_thread().set_tf()
return True | def function[do_trace, parameter[self, arg]]:
constant[
t - trace at the current assembly instruction
trace - trace at the current assembly instruction
]
if name[arg] begin[:]
<ast.Raise object at 0x7da18c4cf2e0>
if compare[name[self].lastEvent is constant[None]] begin[:]
<ast.Raise object at 0x7da18c4ce920>
call[call[name[self].lastEvent.get_thread, parameter[]].set_tf, parameter[]]
return[constant[True]] | keyword[def] identifier[do_trace] ( identifier[self] , identifier[arg] ):
literal[string]
keyword[if] identifier[arg] :
keyword[raise] identifier[CmdError] ( literal[string] )
keyword[if] identifier[self] . identifier[lastEvent] keyword[is] keyword[None] :
keyword[raise] identifier[CmdError] ( literal[string] )
identifier[self] . identifier[lastEvent] . identifier[get_thread] (). identifier[set_tf] ()
keyword[return] keyword[True] | def do_trace(self, arg):
"""
t - trace at the current assembly instruction
trace - trace at the current assembly instruction
"""
if arg: # XXX this check is to be removed
raise CmdError('too many arguments') # depends on [control=['if'], data=[]]
if self.lastEvent is None:
raise CmdError('no current thread set') # depends on [control=['if'], data=[]]
self.lastEvent.get_thread().set_tf()
return True |
def _print_percent(self):
'''Print how much is done in percentage.'''
fraction_done = ((self.continue_value or 0 + self.current_value) /
self.max_value)
self._print('{fraction_done:.1%}'.format(fraction_done=fraction_done)) | def function[_print_percent, parameter[self]]:
constant[Print how much is done in percentage.]
variable[fraction_done] assign[=] binary_operation[<ast.BoolOp object at 0x7da18ede4b50> / name[self].max_value]
call[name[self]._print, parameter[call[constant[{fraction_done:.1%}].format, parameter[]]]] | keyword[def] identifier[_print_percent] ( identifier[self] ):
literal[string]
identifier[fraction_done] =(( identifier[self] . identifier[continue_value] keyword[or] literal[int] + identifier[self] . identifier[current_value] )/
identifier[self] . identifier[max_value] )
identifier[self] . identifier[_print] ( literal[string] . identifier[format] ( identifier[fraction_done] = identifier[fraction_done] )) | def _print_percent(self):
"""Print how much is done in percentage."""
fraction_done = (self.continue_value or 0 + self.current_value) / self.max_value
self._print('{fraction_done:.1%}'.format(fraction_done=fraction_done)) |
def parse(self):
"""
Parses a backup file header. Will be done automatically if
used together with the 'with' statement
"""
self.fp.seek(0)
magic = self.fp.readline()
assert magic == b'ANDROID BACKUP\n'
self.version = int(self.fp.readline().strip())
self.compression = CompressionType(int(self.fp.readline().strip()))
self.encryption = EncryptionType(self.fp.readline().strip().decode())
self.__data_start = self.fp.tell() | def function[parse, parameter[self]]:
constant[
Parses a backup file header. Will be done automatically if
used together with the 'with' statement
]
call[name[self].fp.seek, parameter[constant[0]]]
variable[magic] assign[=] call[name[self].fp.readline, parameter[]]
assert[compare[name[magic] equal[==] constant[b'ANDROID BACKUP\n']]]
name[self].version assign[=] call[name[int], parameter[call[call[name[self].fp.readline, parameter[]].strip, parameter[]]]]
name[self].compression assign[=] call[name[CompressionType], parameter[call[name[int], parameter[call[call[name[self].fp.readline, parameter[]].strip, parameter[]]]]]]
name[self].encryption assign[=] call[name[EncryptionType], parameter[call[call[call[name[self].fp.readline, parameter[]].strip, parameter[]].decode, parameter[]]]]
name[self].__data_start assign[=] call[name[self].fp.tell, parameter[]] | keyword[def] identifier[parse] ( identifier[self] ):
literal[string]
identifier[self] . identifier[fp] . identifier[seek] ( literal[int] )
identifier[magic] = identifier[self] . identifier[fp] . identifier[readline] ()
keyword[assert] identifier[magic] == literal[string]
identifier[self] . identifier[version] = identifier[int] ( identifier[self] . identifier[fp] . identifier[readline] (). identifier[strip] ())
identifier[self] . identifier[compression] = identifier[CompressionType] ( identifier[int] ( identifier[self] . identifier[fp] . identifier[readline] (). identifier[strip] ()))
identifier[self] . identifier[encryption] = identifier[EncryptionType] ( identifier[self] . identifier[fp] . identifier[readline] (). identifier[strip] (). identifier[decode] ())
identifier[self] . identifier[__data_start] = identifier[self] . identifier[fp] . identifier[tell] () | def parse(self):
"""
Parses a backup file header. Will be done automatically if
used together with the 'with' statement
"""
self.fp.seek(0)
magic = self.fp.readline()
assert magic == b'ANDROID BACKUP\n'
self.version = int(self.fp.readline().strip())
self.compression = CompressionType(int(self.fp.readline().strip()))
self.encryption = EncryptionType(self.fp.readline().strip().decode())
self.__data_start = self.fp.tell() |
def rebuild_tree(cls, session, tree_id):
""" This method rebuid tree.
Args:
session (:mod:`sqlalchemy.orm.session.Session`): SQLAlchemy session
tree_id (int or str): id of tree
Example:
* :mod:`sqlalchemy_mptt.tests.cases.get_tree.test_rebuild`
"""
session.query(cls).filter_by(tree_id=tree_id)\
.update({cls.left: 0, cls.right: 0, cls.level: 0})
top = session.query(cls).filter_by(parent_id=None)\
.filter_by(tree_id=tree_id).one()
top.left = left = 1
top.right = right = 2
top.level = level = cls.get_default_level()
def recursive(children, left, right, level):
level = level + 1
for i, node in enumerate(children):
same_level_right = children[i - 1].right
left = left + 1
if i > 0:
left = left + 1
if same_level_right:
left = same_level_right + 1
right = left + 1
node.left = left
node.right = right
parent = node.parent
j = 0
while parent:
parent.right = right + 1 + j
parent = parent.parent
j += 1
node.level = level
recursive(node.children, left, right, level)
recursive(top.children, left, right, level) | def function[rebuild_tree, parameter[cls, session, tree_id]]:
constant[ This method rebuid tree.
Args:
session (:mod:`sqlalchemy.orm.session.Session`): SQLAlchemy session
tree_id (int or str): id of tree
Example:
* :mod:`sqlalchemy_mptt.tests.cases.get_tree.test_rebuild`
]
call[call[call[name[session].query, parameter[name[cls]]].filter_by, parameter[]].update, parameter[dictionary[[<ast.Attribute object at 0x7da1b10e7850>, <ast.Attribute object at 0x7da1b10e6140>, <ast.Attribute object at 0x7da1b10e68c0>], [<ast.Constant object at 0x7da1b10e7430>, <ast.Constant object at 0x7da1b10e7370>, <ast.Constant object at 0x7da1b10e5de0>]]]]
variable[top] assign[=] call[call[call[call[name[session].query, parameter[name[cls]]].filter_by, parameter[]].filter_by, parameter[]].one, parameter[]]
name[top].left assign[=] constant[1]
name[top].right assign[=] constant[2]
name[top].level assign[=] call[name[cls].get_default_level, parameter[]]
def function[recursive, parameter[children, left, right, level]]:
variable[level] assign[=] binary_operation[name[level] + constant[1]]
for taget[tuple[[<ast.Name object at 0x7da1b10e54b0>, <ast.Name object at 0x7da1b10e4fa0>]]] in starred[call[name[enumerate], parameter[name[children]]]] begin[:]
variable[same_level_right] assign[=] call[name[children]][binary_operation[name[i] - constant[1]]].right
variable[left] assign[=] binary_operation[name[left] + constant[1]]
if compare[name[i] greater[>] constant[0]] begin[:]
variable[left] assign[=] binary_operation[name[left] + constant[1]]
if name[same_level_right] begin[:]
variable[left] assign[=] binary_operation[name[same_level_right] + constant[1]]
variable[right] assign[=] binary_operation[name[left] + constant[1]]
name[node].left assign[=] name[left]
name[node].right assign[=] name[right]
variable[parent] assign[=] name[node].parent
variable[j] assign[=] constant[0]
while name[parent] begin[:]
name[parent].right assign[=] binary_operation[binary_operation[name[right] + constant[1]] + name[j]]
variable[parent] assign[=] name[parent].parent
<ast.AugAssign object at 0x7da1b10e7190>
name[node].level assign[=] name[level]
call[name[recursive], parameter[name[node].children, name[left], name[right], name[level]]]
call[name[recursive], parameter[name[top].children, name[left], name[right], name[level]]] | keyword[def] identifier[rebuild_tree] ( identifier[cls] , identifier[session] , identifier[tree_id] ):
literal[string]
identifier[session] . identifier[query] ( identifier[cls] ). identifier[filter_by] ( identifier[tree_id] = identifier[tree_id] ). identifier[update] ({ identifier[cls] . identifier[left] : literal[int] , identifier[cls] . identifier[right] : literal[int] , identifier[cls] . identifier[level] : literal[int] })
identifier[top] = identifier[session] . identifier[query] ( identifier[cls] ). identifier[filter_by] ( identifier[parent_id] = keyword[None] ). identifier[filter_by] ( identifier[tree_id] = identifier[tree_id] ). identifier[one] ()
identifier[top] . identifier[left] = identifier[left] = literal[int]
identifier[top] . identifier[right] = identifier[right] = literal[int]
identifier[top] . identifier[level] = identifier[level] = identifier[cls] . identifier[get_default_level] ()
keyword[def] identifier[recursive] ( identifier[children] , identifier[left] , identifier[right] , identifier[level] ):
identifier[level] = identifier[level] + literal[int]
keyword[for] identifier[i] , identifier[node] keyword[in] identifier[enumerate] ( identifier[children] ):
identifier[same_level_right] = identifier[children] [ identifier[i] - literal[int] ]. identifier[right]
identifier[left] = identifier[left] + literal[int]
keyword[if] identifier[i] > literal[int] :
identifier[left] = identifier[left] + literal[int]
keyword[if] identifier[same_level_right] :
identifier[left] = identifier[same_level_right] + literal[int]
identifier[right] = identifier[left] + literal[int]
identifier[node] . identifier[left] = identifier[left]
identifier[node] . identifier[right] = identifier[right]
identifier[parent] = identifier[node] . identifier[parent]
identifier[j] = literal[int]
keyword[while] identifier[parent] :
identifier[parent] . identifier[right] = identifier[right] + literal[int] + identifier[j]
identifier[parent] = identifier[parent] . identifier[parent]
identifier[j] += literal[int]
identifier[node] . identifier[level] = identifier[level]
identifier[recursive] ( identifier[node] . identifier[children] , identifier[left] , identifier[right] , identifier[level] )
identifier[recursive] ( identifier[top] . identifier[children] , identifier[left] , identifier[right] , identifier[level] ) | def rebuild_tree(cls, session, tree_id):
""" This method rebuid tree.
Args:
session (:mod:`sqlalchemy.orm.session.Session`): SQLAlchemy session
tree_id (int or str): id of tree
Example:
* :mod:`sqlalchemy_mptt.tests.cases.get_tree.test_rebuild`
"""
session.query(cls).filter_by(tree_id=tree_id).update({cls.left: 0, cls.right: 0, cls.level: 0})
top = session.query(cls).filter_by(parent_id=None).filter_by(tree_id=tree_id).one()
top.left = left = 1
top.right = right = 2
top.level = level = cls.get_default_level()
def recursive(children, left, right, level):
level = level + 1
for (i, node) in enumerate(children):
same_level_right = children[i - 1].right
left = left + 1
if i > 0:
left = left + 1 # depends on [control=['if'], data=[]]
if same_level_right:
left = same_level_right + 1 # depends on [control=['if'], data=[]]
right = left + 1
node.left = left
node.right = right
parent = node.parent
j = 0
while parent:
parent.right = right + 1 + j
parent = parent.parent
j += 1 # depends on [control=['while'], data=[]]
node.level = level
recursive(node.children, left, right, level) # depends on [control=['for'], data=[]]
recursive(top.children, left, right, level) |
async def shutdown(sig, loop):
"""Gracefully cancel current tasks when app receives a shutdown signal."""
logging.info(f'Received exit signal {sig.name}...')
tasks = [task for task in asyncio.Task.all_tasks() if task is not
asyncio.tasks.Task.current_task()]
for task in tasks:
logging.debug(f'Cancelling task: {task}')
task.cancel()
results = await asyncio.gather(*tasks, return_exceptions=True)
logging.debug(f'Done awaiting cancelled tasks, results: {results}')
loop.stop()
logging.info('Shutdown complete.') | <ast.AsyncFunctionDef object at 0x7da1b0658910> | keyword[async] keyword[def] identifier[shutdown] ( identifier[sig] , identifier[loop] ):
literal[string]
identifier[logging] . identifier[info] ( literal[string] )
identifier[tasks] =[ identifier[task] keyword[for] identifier[task] keyword[in] identifier[asyncio] . identifier[Task] . identifier[all_tasks] () keyword[if] identifier[task] keyword[is] keyword[not]
identifier[asyncio] . identifier[tasks] . identifier[Task] . identifier[current_task] ()]
keyword[for] identifier[task] keyword[in] identifier[tasks] :
identifier[logging] . identifier[debug] ( literal[string] )
identifier[task] . identifier[cancel] ()
identifier[results] = keyword[await] identifier[asyncio] . identifier[gather] (* identifier[tasks] , identifier[return_exceptions] = keyword[True] )
identifier[logging] . identifier[debug] ( literal[string] )
identifier[loop] . identifier[stop] ()
identifier[logging] . identifier[info] ( literal[string] ) | async def shutdown(sig, loop):
"""Gracefully cancel current tasks when app receives a shutdown signal."""
logging.info(f'Received exit signal {sig.name}...')
tasks = [task for task in asyncio.Task.all_tasks() if task is not asyncio.tasks.Task.current_task()]
for task in tasks:
logging.debug(f'Cancelling task: {task}')
task.cancel() # depends on [control=['for'], data=['task']]
results = await asyncio.gather(*tasks, return_exceptions=True)
logging.debug(f'Done awaiting cancelled tasks, results: {results}')
loop.stop()
logging.info('Shutdown complete.') |
def read(file_name=None, is_encoding=True, ignore_raises=False):
"""Read file"""
if file_name is None:
raise Exception("File name not provided")
if ignore_raises:
try:
return read_file(is_encoding=is_encoding,
file_path=path_format(
file_path=CURR_PATH,
file_name=file_name,
ignore_raises=ignore_raises))
except Exception:
# TODO: not silence like this,
# must be on setup.cfg, README path
return 'NOTFOUND'
return read_file(is_encoding=is_encoding,
file_path=path_format(
file_path=CURR_PATH,
file_name=file_name,
ignore_raises=ignore_raises)) | def function[read, parameter[file_name, is_encoding, ignore_raises]]:
constant[Read file]
if compare[name[file_name] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0a600d0>
if name[ignore_raises] begin[:]
<ast.Try object at 0x7da1b0a63fa0>
return[call[name[read_file], parameter[]]] | keyword[def] identifier[read] ( identifier[file_name] = keyword[None] , identifier[is_encoding] = keyword[True] , identifier[ignore_raises] = keyword[False] ):
literal[string]
keyword[if] identifier[file_name] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] identifier[ignore_raises] :
keyword[try] :
keyword[return] identifier[read_file] ( identifier[is_encoding] = identifier[is_encoding] ,
identifier[file_path] = identifier[path_format] (
identifier[file_path] = identifier[CURR_PATH] ,
identifier[file_name] = identifier[file_name] ,
identifier[ignore_raises] = identifier[ignore_raises] ))
keyword[except] identifier[Exception] :
keyword[return] literal[string]
keyword[return] identifier[read_file] ( identifier[is_encoding] = identifier[is_encoding] ,
identifier[file_path] = identifier[path_format] (
identifier[file_path] = identifier[CURR_PATH] ,
identifier[file_name] = identifier[file_name] ,
identifier[ignore_raises] = identifier[ignore_raises] )) | def read(file_name=None, is_encoding=True, ignore_raises=False):
"""Read file"""
if file_name is None:
raise Exception('File name not provided') # depends on [control=['if'], data=[]]
if ignore_raises:
try:
return read_file(is_encoding=is_encoding, file_path=path_format(file_path=CURR_PATH, file_name=file_name, ignore_raises=ignore_raises)) # depends on [control=['try'], data=[]]
except Exception:
# TODO: not silence like this,
# must be on setup.cfg, README path
return 'NOTFOUND' # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return read_file(is_encoding=is_encoding, file_path=path_format(file_path=CURR_PATH, file_name=file_name, ignore_raises=ignore_raises)) |
def keyword(self) -> Tuple[Optional[str], str]:
"""Parse a YANG statement keyword.
Raises:
EndOfInput: If past the end of input.
UnexpectedInput: If no syntactically correct keyword is found.
"""
i1 = self.yang_identifier()
if self.peek() == ":":
self.offset += 1
i2 = self.yang_identifier()
return (i1, i2)
return (None, i1) | def function[keyword, parameter[self]]:
constant[Parse a YANG statement keyword.
Raises:
EndOfInput: If past the end of input.
UnexpectedInput: If no syntactically correct keyword is found.
]
variable[i1] assign[=] call[name[self].yang_identifier, parameter[]]
if compare[call[name[self].peek, parameter[]] equal[==] constant[:]] begin[:]
<ast.AugAssign object at 0x7da1b0559cc0>
variable[i2] assign[=] call[name[self].yang_identifier, parameter[]]
return[tuple[[<ast.Name object at 0x7da1b02e6da0>, <ast.Name object at 0x7da1b02e68f0>]]]
return[tuple[[<ast.Constant object at 0x7da1b04922c0>, <ast.Name object at 0x7da1b0492230>]]] | keyword[def] identifier[keyword] ( identifier[self] )-> identifier[Tuple] [ identifier[Optional] [ identifier[str] ], identifier[str] ]:
literal[string]
identifier[i1] = identifier[self] . identifier[yang_identifier] ()
keyword[if] identifier[self] . identifier[peek] ()== literal[string] :
identifier[self] . identifier[offset] += literal[int]
identifier[i2] = identifier[self] . identifier[yang_identifier] ()
keyword[return] ( identifier[i1] , identifier[i2] )
keyword[return] ( keyword[None] , identifier[i1] ) | def keyword(self) -> Tuple[Optional[str], str]:
"""Parse a YANG statement keyword.
Raises:
EndOfInput: If past the end of input.
UnexpectedInput: If no syntactically correct keyword is found.
"""
i1 = self.yang_identifier()
if self.peek() == ':':
self.offset += 1
i2 = self.yang_identifier()
return (i1, i2) # depends on [control=['if'], data=[]]
return (None, i1) |
def set_logging_level(cl_args):
"""simply set verbose level based on command-line args
:param cl_args: CLI arguments
:type cl_args: dict
:return: None
:rtype: None
"""
if 'verbose' in cl_args and cl_args['verbose']:
configure(logging.DEBUG)
else:
configure(logging.INFO) | def function[set_logging_level, parameter[cl_args]]:
constant[simply set verbose level based on command-line args
:param cl_args: CLI arguments
:type cl_args: dict
:return: None
:rtype: None
]
if <ast.BoolOp object at 0x7da18c4cfdc0> begin[:]
call[name[configure], parameter[name[logging].DEBUG]] | keyword[def] identifier[set_logging_level] ( identifier[cl_args] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[cl_args] keyword[and] identifier[cl_args] [ literal[string] ]:
identifier[configure] ( identifier[logging] . identifier[DEBUG] )
keyword[else] :
identifier[configure] ( identifier[logging] . identifier[INFO] ) | def set_logging_level(cl_args):
"""simply set verbose level based on command-line args
:param cl_args: CLI arguments
:type cl_args: dict
:return: None
:rtype: None
"""
if 'verbose' in cl_args and cl_args['verbose']:
configure(logging.DEBUG) # depends on [control=['if'], data=[]]
else:
configure(logging.INFO) |
def set_providers(self, *providers):
"""Replace current providers with given ones"""
if self.providers:
self.clear()
for provider in providers:
self.add(provider) | def function[set_providers, parameter[self]]:
constant[Replace current providers with given ones]
if name[self].providers begin[:]
call[name[self].clear, parameter[]]
for taget[name[provider]] in starred[name[providers]] begin[:]
call[name[self].add, parameter[name[provider]]] | keyword[def] identifier[set_providers] ( identifier[self] ,* identifier[providers] ):
literal[string]
keyword[if] identifier[self] . identifier[providers] :
identifier[self] . identifier[clear] ()
keyword[for] identifier[provider] keyword[in] identifier[providers] :
identifier[self] . identifier[add] ( identifier[provider] ) | def set_providers(self, *providers):
"""Replace current providers with given ones"""
if self.providers:
self.clear() # depends on [control=['if'], data=[]]
for provider in providers:
self.add(provider) # depends on [control=['for'], data=['provider']] |
def filter_record(records):
"""
Filter records and remove items with missing or inconsistent fields
Parameters
----------
records : list
A list of Record objects
Returns
-------
records, ignored : (Record list, dict)
A tuple of filtered records, and a dictionary counting the
missings fields
"""
def scheme(r):
if r.interaction is None:
call_duration_ok = True
elif r.interaction == 'call':
call_duration_ok = isinstance(r.call_duration, (int, float))
else:
call_duration_ok = True
callandtext = r.interaction in ['call', 'text']
not_callandtext = not callandtext
return {
'interaction': r.interaction in ['call', 'text', 'gps', None],
'direction': (not_callandtext and r.direction is None) or r.direction in ['in', 'out'],
'correspondent_id': not_callandtext or (r.correspondent_id not in [None, '']),
'datetime': isinstance(r.datetime, datetime),
'call_duration': call_duration_ok,
'location': callandtext or r.position.type() is not None
}
ignored = OrderedDict([
('all', 0),
('interaction', 0),
('direction', 0),
('correspondent_id', 0),
('datetime', 0),
('call_duration', 0),
('location', 0),
])
bad_records = []
def _filter(records):
for r in records:
valid = True
for key, valid_key in scheme(r).items():
if not valid_key:
ignored[key] += 1
bad_records.append(r)
# Not breaking, to count all fields with errors
valid = False
if valid:
yield r
else:
ignored['all'] += 1
return list(_filter(records)), ignored, bad_records | def function[filter_record, parameter[records]]:
constant[
Filter records and remove items with missing or inconsistent fields
Parameters
----------
records : list
A list of Record objects
Returns
-------
records, ignored : (Record list, dict)
A tuple of filtered records, and a dictionary counting the
missings fields
]
def function[scheme, parameter[r]]:
if compare[name[r].interaction is constant[None]] begin[:]
variable[call_duration_ok] assign[=] constant[True]
variable[callandtext] assign[=] compare[name[r].interaction in list[[<ast.Constant object at 0x7da1b0dc0640>, <ast.Constant object at 0x7da1b0dc28f0>]]]
variable[not_callandtext] assign[=] <ast.UnaryOp object at 0x7da1b0dc3940>
return[dictionary[[<ast.Constant object at 0x7da1b0dc2aa0>, <ast.Constant object at 0x7da1b0dc1180>, <ast.Constant object at 0x7da1b0dc23b0>, <ast.Constant object at 0x7da1b0dc34f0>, <ast.Constant object at 0x7da1b0dc1ab0>, <ast.Constant object at 0x7da1b0dc0f40>], [<ast.Compare object at 0x7da1b0dc3fd0>, <ast.BoolOp object at 0x7da1b0dc2ad0>, <ast.BoolOp object at 0x7da1b0dc0280>, <ast.Call object at 0x7da1b0dc1090>, <ast.Name object at 0x7da1b0dc2f80>, <ast.BoolOp object at 0x7da1b0dc3df0>]]]
variable[ignored] assign[=] call[name[OrderedDict], parameter[list[[<ast.Tuple object at 0x7da1b0dc3730>, <ast.Tuple object at 0x7da1b0dc3700>, <ast.Tuple object at 0x7da1b0dc1570>, <ast.Tuple object at 0x7da1b0dc0f10>, <ast.Tuple object at 0x7da1b0dc2c20>, <ast.Tuple object at 0x7da1b0dc1030>, <ast.Tuple object at 0x7da1b0dc1de0>]]]]
variable[bad_records] assign[=] list[[]]
def function[_filter, parameter[records]]:
for taget[name[r]] in starred[name[records]] begin[:]
variable[valid] assign[=] constant[True]
for taget[tuple[[<ast.Name object at 0x7da18dc052a0>, <ast.Name object at 0x7da18dc076d0>]]] in starred[call[call[name[scheme], parameter[name[r]]].items, parameter[]]] begin[:]
if <ast.UnaryOp object at 0x7da18dc063b0> begin[:]
<ast.AugAssign object at 0x7da18dc04130>
call[name[bad_records].append, parameter[name[r]]]
variable[valid] assign[=] constant[False]
if name[valid] begin[:]
<ast.Yield object at 0x7da18dc06230>
return[tuple[[<ast.Call object at 0x7da18dc05d80>, <ast.Name object at 0x7da18dc07d30>, <ast.Name object at 0x7da18dc05270>]]] | keyword[def] identifier[filter_record] ( identifier[records] ):
literal[string]
keyword[def] identifier[scheme] ( identifier[r] ):
keyword[if] identifier[r] . identifier[interaction] keyword[is] keyword[None] :
identifier[call_duration_ok] = keyword[True]
keyword[elif] identifier[r] . identifier[interaction] == literal[string] :
identifier[call_duration_ok] = identifier[isinstance] ( identifier[r] . identifier[call_duration] ,( identifier[int] , identifier[float] ))
keyword[else] :
identifier[call_duration_ok] = keyword[True]
identifier[callandtext] = identifier[r] . identifier[interaction] keyword[in] [ literal[string] , literal[string] ]
identifier[not_callandtext] = keyword[not] identifier[callandtext]
keyword[return] {
literal[string] : identifier[r] . identifier[interaction] keyword[in] [ literal[string] , literal[string] , literal[string] , keyword[None] ],
literal[string] :( identifier[not_callandtext] keyword[and] identifier[r] . identifier[direction] keyword[is] keyword[None] ) keyword[or] identifier[r] . identifier[direction] keyword[in] [ literal[string] , literal[string] ],
literal[string] : identifier[not_callandtext] keyword[or] ( identifier[r] . identifier[correspondent_id] keyword[not] keyword[in] [ keyword[None] , literal[string] ]),
literal[string] : identifier[isinstance] ( identifier[r] . identifier[datetime] , identifier[datetime] ),
literal[string] : identifier[call_duration_ok] ,
literal[string] : identifier[callandtext] keyword[or] identifier[r] . identifier[position] . identifier[type] () keyword[is] keyword[not] keyword[None]
}
identifier[ignored] = identifier[OrderedDict] ([
( literal[string] , literal[int] ),
( literal[string] , literal[int] ),
( literal[string] , literal[int] ),
( literal[string] , literal[int] ),
( literal[string] , literal[int] ),
( literal[string] , literal[int] ),
( literal[string] , literal[int] ),
])
identifier[bad_records] =[]
keyword[def] identifier[_filter] ( identifier[records] ):
keyword[for] identifier[r] keyword[in] identifier[records] :
identifier[valid] = keyword[True]
keyword[for] identifier[key] , identifier[valid_key] keyword[in] identifier[scheme] ( identifier[r] ). identifier[items] ():
keyword[if] keyword[not] identifier[valid_key] :
identifier[ignored] [ identifier[key] ]+= literal[int]
identifier[bad_records] . identifier[append] ( identifier[r] )
identifier[valid] = keyword[False]
keyword[if] identifier[valid] :
keyword[yield] identifier[r]
keyword[else] :
identifier[ignored] [ literal[string] ]+= literal[int]
keyword[return] identifier[list] ( identifier[_filter] ( identifier[records] )), identifier[ignored] , identifier[bad_records] | def filter_record(records):
"""
Filter records and remove items with missing or inconsistent fields
Parameters
----------
records : list
A list of Record objects
Returns
-------
records, ignored : (Record list, dict)
A tuple of filtered records, and a dictionary counting the
missings fields
"""
def scheme(r):
if r.interaction is None:
call_duration_ok = True # depends on [control=['if'], data=[]]
elif r.interaction == 'call':
call_duration_ok = isinstance(r.call_duration, (int, float)) # depends on [control=['if'], data=[]]
else:
call_duration_ok = True
callandtext = r.interaction in ['call', 'text']
not_callandtext = not callandtext
return {'interaction': r.interaction in ['call', 'text', 'gps', None], 'direction': not_callandtext and r.direction is None or r.direction in ['in', 'out'], 'correspondent_id': not_callandtext or r.correspondent_id not in [None, ''], 'datetime': isinstance(r.datetime, datetime), 'call_duration': call_duration_ok, 'location': callandtext or r.position.type() is not None}
ignored = OrderedDict([('all', 0), ('interaction', 0), ('direction', 0), ('correspondent_id', 0), ('datetime', 0), ('call_duration', 0), ('location', 0)])
bad_records = []
def _filter(records):
for r in records:
valid = True
for (key, valid_key) in scheme(r).items():
if not valid_key:
ignored[key] += 1
bad_records.append(r)
# Not breaking, to count all fields with errors
valid = False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if valid:
yield r # depends on [control=['if'], data=[]]
else:
ignored['all'] += 1 # depends on [control=['for'], data=['r']]
return (list(_filter(records)), ignored, bad_records) |
def _get_output(self, a, image):
""" Looks up the precomputed adversarial image for a given image.
"""
sd = np.square(self._input_images - image)
mses = np.mean(sd, axis=tuple(range(1, sd.ndim)))
index = np.argmin(mses)
# if we run into numerical problems with this approach, we might
# need to add a very tiny threshold here
if mses[index] > 0:
raise ValueError('No precomputed output image for this image')
return self._output_images[index] | def function[_get_output, parameter[self, a, image]]:
constant[ Looks up the precomputed adversarial image for a given image.
]
variable[sd] assign[=] call[name[np].square, parameter[binary_operation[name[self]._input_images - name[image]]]]
variable[mses] assign[=] call[name[np].mean, parameter[name[sd]]]
variable[index] assign[=] call[name[np].argmin, parameter[name[mses]]]
if compare[call[name[mses]][name[index]] greater[>] constant[0]] begin[:]
<ast.Raise object at 0x7da18dc99090>
return[call[name[self]._output_images][name[index]]] | keyword[def] identifier[_get_output] ( identifier[self] , identifier[a] , identifier[image] ):
literal[string]
identifier[sd] = identifier[np] . identifier[square] ( identifier[self] . identifier[_input_images] - identifier[image] )
identifier[mses] = identifier[np] . identifier[mean] ( identifier[sd] , identifier[axis] = identifier[tuple] ( identifier[range] ( literal[int] , identifier[sd] . identifier[ndim] )))
identifier[index] = identifier[np] . identifier[argmin] ( identifier[mses] )
keyword[if] identifier[mses] [ identifier[index] ]> literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[self] . identifier[_output_images] [ identifier[index] ] | def _get_output(self, a, image):
""" Looks up the precomputed adversarial image for a given image.
"""
sd = np.square(self._input_images - image)
mses = np.mean(sd, axis=tuple(range(1, sd.ndim)))
index = np.argmin(mses)
# if we run into numerical problems with this approach, we might
# need to add a very tiny threshold here
if mses[index] > 0:
raise ValueError('No precomputed output image for this image') # depends on [control=['if'], data=[]]
return self._output_images[index] |
def interleave_planes(ipixels, apixels, ipsize, apsize):
"""
Interleave (colour) planes, e.g. RGB + A = RGBA.
Return an array of pixels consisting of the `ipsize` elements of
data from each pixel in `ipixels` followed by the `apsize` elements
of data from each pixel in `apixels`. Conventionally `ipixels`
and `apixels` are byte arrays so the sizes are bytes, but it
actually works with any arrays of the same type. The returned
array is the same type as the input arrays which should be the
same type as each other.
"""
itotal = len(ipixels)
atotal = len(apixels)
newtotal = itotal + atotal
newpsize = ipsize + apsize
# Set up the output buffer
# See http://www.python.org/doc/2.4.4/lib/module-array.html#l2h-1356
out = array(ipixels.typecode)
# It's annoying that there is no cheap way to set the array size :-(
out.extend(ipixels)
out.extend(apixels)
# Interleave in the pixel data
for i in range(ipsize):
out[i:newtotal:newpsize] = ipixels[i:itotal:ipsize]
for i in range(apsize):
out[i+ipsize:newtotal:newpsize] = apixels[i:atotal:apsize]
return out | def function[interleave_planes, parameter[ipixels, apixels, ipsize, apsize]]:
constant[
Interleave (colour) planes, e.g. RGB + A = RGBA.
Return an array of pixels consisting of the `ipsize` elements of
data from each pixel in `ipixels` followed by the `apsize` elements
of data from each pixel in `apixels`. Conventionally `ipixels`
and `apixels` are byte arrays so the sizes are bytes, but it
actually works with any arrays of the same type. The returned
array is the same type as the input arrays which should be the
same type as each other.
]
variable[itotal] assign[=] call[name[len], parameter[name[ipixels]]]
variable[atotal] assign[=] call[name[len], parameter[name[apixels]]]
variable[newtotal] assign[=] binary_operation[name[itotal] + name[atotal]]
variable[newpsize] assign[=] binary_operation[name[ipsize] + name[apsize]]
variable[out] assign[=] call[name[array], parameter[name[ipixels].typecode]]
call[name[out].extend, parameter[name[ipixels]]]
call[name[out].extend, parameter[name[apixels]]]
for taget[name[i]] in starred[call[name[range], parameter[name[ipsize]]]] begin[:]
call[name[out]][<ast.Slice object at 0x7da1b0ea0cd0>] assign[=] call[name[ipixels]][<ast.Slice object at 0x7da1b0ea0790>]
for taget[name[i]] in starred[call[name[range], parameter[name[apsize]]]] begin[:]
call[name[out]][<ast.Slice object at 0x7da1b0e74130>] assign[=] call[name[apixels]][<ast.Slice object at 0x7da1b0e741f0>]
return[name[out]] | keyword[def] identifier[interleave_planes] ( identifier[ipixels] , identifier[apixels] , identifier[ipsize] , identifier[apsize] ):
literal[string]
identifier[itotal] = identifier[len] ( identifier[ipixels] )
identifier[atotal] = identifier[len] ( identifier[apixels] )
identifier[newtotal] = identifier[itotal] + identifier[atotal]
identifier[newpsize] = identifier[ipsize] + identifier[apsize]
identifier[out] = identifier[array] ( identifier[ipixels] . identifier[typecode] )
identifier[out] . identifier[extend] ( identifier[ipixels] )
identifier[out] . identifier[extend] ( identifier[apixels] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[ipsize] ):
identifier[out] [ identifier[i] : identifier[newtotal] : identifier[newpsize] ]= identifier[ipixels] [ identifier[i] : identifier[itotal] : identifier[ipsize] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[apsize] ):
identifier[out] [ identifier[i] + identifier[ipsize] : identifier[newtotal] : identifier[newpsize] ]= identifier[apixels] [ identifier[i] : identifier[atotal] : identifier[apsize] ]
keyword[return] identifier[out] | def interleave_planes(ipixels, apixels, ipsize, apsize):
"""
Interleave (colour) planes, e.g. RGB + A = RGBA.
Return an array of pixels consisting of the `ipsize` elements of
data from each pixel in `ipixels` followed by the `apsize` elements
of data from each pixel in `apixels`. Conventionally `ipixels`
and `apixels` are byte arrays so the sizes are bytes, but it
actually works with any arrays of the same type. The returned
array is the same type as the input arrays which should be the
same type as each other.
"""
itotal = len(ipixels)
atotal = len(apixels)
newtotal = itotal + atotal
newpsize = ipsize + apsize
# Set up the output buffer
# See http://www.python.org/doc/2.4.4/lib/module-array.html#l2h-1356
out = array(ipixels.typecode)
# It's annoying that there is no cheap way to set the array size :-(
out.extend(ipixels)
out.extend(apixels)
# Interleave in the pixel data
for i in range(ipsize):
out[i:newtotal:newpsize] = ipixels[i:itotal:ipsize] # depends on [control=['for'], data=['i']]
for i in range(apsize):
out[i + ipsize:newtotal:newpsize] = apixels[i:atotal:apsize] # depends on [control=['for'], data=['i']]
return out |
def find_threads_by_name(self, name, bExactMatch = True):
"""
Find threads by name, using different search methods.
@type name: str, None
@param name: Name to look for. Use C{None} to find nameless threads.
@type bExactMatch: bool
@param bExactMatch: C{True} if the name must be
B{exactly} as given, C{False} if the name can be
loosely matched.
This parameter is ignored when C{name} is C{None}.
@rtype: list( L{Thread} )
@return: All threads matching the given name.
"""
found_threads = list()
# Find threads with no name.
if name is None:
for aThread in self.iter_threads():
if aThread.get_name() is None:
found_threads.append(aThread)
# Find threads matching the given name exactly.
elif bExactMatch:
for aThread in self.iter_threads():
if aThread.get_name() == name:
found_threads.append(aThread)
# Find threads whose names match the given substring.
else:
for aThread in self.iter_threads():
t_name = aThread.get_name()
if t_name is not None and name in t_name:
found_threads.append(aThread)
return found_threads | def function[find_threads_by_name, parameter[self, name, bExactMatch]]:
constant[
Find threads by name, using different search methods.
@type name: str, None
@param name: Name to look for. Use C{None} to find nameless threads.
@type bExactMatch: bool
@param bExactMatch: C{True} if the name must be
B{exactly} as given, C{False} if the name can be
loosely matched.
This parameter is ignored when C{name} is C{None}.
@rtype: list( L{Thread} )
@return: All threads matching the given name.
]
variable[found_threads] assign[=] call[name[list], parameter[]]
if compare[name[name] is constant[None]] begin[:]
for taget[name[aThread]] in starred[call[name[self].iter_threads, parameter[]]] begin[:]
if compare[call[name[aThread].get_name, parameter[]] is constant[None]] begin[:]
call[name[found_threads].append, parameter[name[aThread]]]
return[name[found_threads]] | keyword[def] identifier[find_threads_by_name] ( identifier[self] , identifier[name] , identifier[bExactMatch] = keyword[True] ):
literal[string]
identifier[found_threads] = identifier[list] ()
keyword[if] identifier[name] keyword[is] keyword[None] :
keyword[for] identifier[aThread] keyword[in] identifier[self] . identifier[iter_threads] ():
keyword[if] identifier[aThread] . identifier[get_name] () keyword[is] keyword[None] :
identifier[found_threads] . identifier[append] ( identifier[aThread] )
keyword[elif] identifier[bExactMatch] :
keyword[for] identifier[aThread] keyword[in] identifier[self] . identifier[iter_threads] ():
keyword[if] identifier[aThread] . identifier[get_name] ()== identifier[name] :
identifier[found_threads] . identifier[append] ( identifier[aThread] )
keyword[else] :
keyword[for] identifier[aThread] keyword[in] identifier[self] . identifier[iter_threads] ():
identifier[t_name] = identifier[aThread] . identifier[get_name] ()
keyword[if] identifier[t_name] keyword[is] keyword[not] keyword[None] keyword[and] identifier[name] keyword[in] identifier[t_name] :
identifier[found_threads] . identifier[append] ( identifier[aThread] )
keyword[return] identifier[found_threads] | def find_threads_by_name(self, name, bExactMatch=True):
"""
Find threads by name, using different search methods.
@type name: str, None
@param name: Name to look for. Use C{None} to find nameless threads.
@type bExactMatch: bool
@param bExactMatch: C{True} if the name must be
B{exactly} as given, C{False} if the name can be
loosely matched.
This parameter is ignored when C{name} is C{None}.
@rtype: list( L{Thread} )
@return: All threads matching the given name.
"""
found_threads = list()
# Find threads with no name.
if name is None:
for aThread in self.iter_threads():
if aThread.get_name() is None:
found_threads.append(aThread) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['aThread']] # depends on [control=['if'], data=[]]
# Find threads matching the given name exactly.
elif bExactMatch:
for aThread in self.iter_threads():
if aThread.get_name() == name:
found_threads.append(aThread) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['aThread']] # depends on [control=['if'], data=[]]
else:
# Find threads whose names match the given substring.
for aThread in self.iter_threads():
t_name = aThread.get_name()
if t_name is not None and name in t_name:
found_threads.append(aThread) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['aThread']]
return found_threads |
def getLoader(*a, **kw):
"""
Deprecated. Don't use this.
"""
warn("xmantissa.publicweb.getLoader is deprecated, use "
"PrivateApplication.getDocFactory or SiteTemplateResolver."
"getDocFactory.", category=DeprecationWarning, stacklevel=2)
from xmantissa.webtheme import getLoader
return getLoader(*a, **kw) | def function[getLoader, parameter[]]:
constant[
Deprecated. Don't use this.
]
call[name[warn], parameter[constant[xmantissa.publicweb.getLoader is deprecated, use PrivateApplication.getDocFactory or SiteTemplateResolver.getDocFactory.]]]
from relative_module[xmantissa.webtheme] import module[getLoader]
return[call[name[getLoader], parameter[<ast.Starred object at 0x7da1b0a33c10>]]] | keyword[def] identifier[getLoader] (* identifier[a] ,** identifier[kw] ):
literal[string]
identifier[warn] ( literal[string]
literal[string]
literal[string] , identifier[category] = identifier[DeprecationWarning] , identifier[stacklevel] = literal[int] )
keyword[from] identifier[xmantissa] . identifier[webtheme] keyword[import] identifier[getLoader]
keyword[return] identifier[getLoader] (* identifier[a] ,** identifier[kw] ) | def getLoader(*a, **kw):
"""
Deprecated. Don't use this.
"""
warn('xmantissa.publicweb.getLoader is deprecated, use PrivateApplication.getDocFactory or SiteTemplateResolver.getDocFactory.', category=DeprecationWarning, stacklevel=2)
from xmantissa.webtheme import getLoader
return getLoader(*a, **kw) |
def removeLayout(self, layout):
'''Iteratively remove graphical objects from layout.'''
for cnt in reversed(range(layout.count())):
item = layout.takeAt(cnt)
widget = item.widget()
if widget is not None:
widget.deleteLater()
else:
'''If sublayout encountered, iterate recursively.'''
self.removeLayout(item.layout()) | def function[removeLayout, parameter[self, layout]]:
constant[Iteratively remove graphical objects from layout.]
for taget[name[cnt]] in starred[call[name[reversed], parameter[call[name[range], parameter[call[name[layout].count, parameter[]]]]]]] begin[:]
variable[item] assign[=] call[name[layout].takeAt, parameter[name[cnt]]]
variable[widget] assign[=] call[name[item].widget, parameter[]]
if compare[name[widget] is_not constant[None]] begin[:]
call[name[widget].deleteLater, parameter[]] | keyword[def] identifier[removeLayout] ( identifier[self] , identifier[layout] ):
literal[string]
keyword[for] identifier[cnt] keyword[in] identifier[reversed] ( identifier[range] ( identifier[layout] . identifier[count] ())):
identifier[item] = identifier[layout] . identifier[takeAt] ( identifier[cnt] )
identifier[widget] = identifier[item] . identifier[widget] ()
keyword[if] identifier[widget] keyword[is] keyword[not] keyword[None] :
identifier[widget] . identifier[deleteLater] ()
keyword[else] :
literal[string]
identifier[self] . identifier[removeLayout] ( identifier[item] . identifier[layout] ()) | def removeLayout(self, layout):
"""Iteratively remove graphical objects from layout."""
for cnt in reversed(range(layout.count())):
item = layout.takeAt(cnt)
widget = item.widget()
if widget is not None:
widget.deleteLater() # depends on [control=['if'], data=['widget']]
else:
'If sublayout encountered, iterate recursively.'
self.removeLayout(item.layout()) # depends on [control=['for'], data=['cnt']] |
def url_escape(value, plus=True):
"""Returns a URL-encoded version of the given value.
If ``plus`` is true (the default), spaces will be represented
as "+" instead of "%20". This is appropriate for query strings
but not for the path component of a URL. Note that this default
is the reverse of Python's urllib module.
.. versionadded:: 3.1
The ``plus`` argument
"""
quote = urllib_parse.quote_plus if plus else urllib_parse.quote
return quote(utf8(value)) | def function[url_escape, parameter[value, plus]]:
constant[Returns a URL-encoded version of the given value.
If ``plus`` is true (the default), spaces will be represented
as "+" instead of "%20". This is appropriate for query strings
but not for the path component of a URL. Note that this default
is the reverse of Python's urllib module.
.. versionadded:: 3.1
The ``plus`` argument
]
variable[quote] assign[=] <ast.IfExp object at 0x7da1b1111600>
return[call[name[quote], parameter[call[name[utf8], parameter[name[value]]]]]] | keyword[def] identifier[url_escape] ( identifier[value] , identifier[plus] = keyword[True] ):
literal[string]
identifier[quote] = identifier[urllib_parse] . identifier[quote_plus] keyword[if] identifier[plus] keyword[else] identifier[urllib_parse] . identifier[quote]
keyword[return] identifier[quote] ( identifier[utf8] ( identifier[value] )) | def url_escape(value, plus=True):
"""Returns a URL-encoded version of the given value.
If ``plus`` is true (the default), spaces will be represented
as "+" instead of "%20". This is appropriate for query strings
but not for the path component of a URL. Note that this default
is the reverse of Python's urllib module.
.. versionadded:: 3.1
The ``plus`` argument
"""
quote = urllib_parse.quote_plus if plus else urllib_parse.quote
return quote(utf8(value)) |
def from_stream(
klass, stream, path=None, tabix_path=None, record_checks=None, parsed_samples=None
):
"""Create new :py:class:`Reader` from file
.. note::
If you use the ``parsed_samples`` feature and you write out
records then you must not change the ``FORMAT`` of the record.
:param stream: ``file``-like object to read from
:param path: optional string with path to store (for display only)
:param list record_checks: record checks to perform, can contain
'INFO' and 'FORMAT'
:param list parsed_samples: ``list`` of ``str`` values with names of
samples to parse call information for (for speedup); leave to
``None`` for ignoring
"""
record_checks = record_checks or []
if tabix_path and not path:
raise ValueError("Must give path if tabix_path is given")
return Reader(
stream=stream,
path=path,
tabix_path=tabix_path,
record_checks=record_checks,
parsed_samples=parsed_samples,
) | def function[from_stream, parameter[klass, stream, path, tabix_path, record_checks, parsed_samples]]:
constant[Create new :py:class:`Reader` from file
.. note::
If you use the ``parsed_samples`` feature and you write out
records then you must not change the ``FORMAT`` of the record.
:param stream: ``file``-like object to read from
:param path: optional string with path to store (for display only)
:param list record_checks: record checks to perform, can contain
'INFO' and 'FORMAT'
:param list parsed_samples: ``list`` of ``str`` values with names of
samples to parse call information for (for speedup); leave to
``None`` for ignoring
]
variable[record_checks] assign[=] <ast.BoolOp object at 0x7da18f58f6a0>
if <ast.BoolOp object at 0x7da18f58e770> begin[:]
<ast.Raise object at 0x7da18f00d930>
return[call[name[Reader], parameter[]]] | keyword[def] identifier[from_stream] (
identifier[klass] , identifier[stream] , identifier[path] = keyword[None] , identifier[tabix_path] = keyword[None] , identifier[record_checks] = keyword[None] , identifier[parsed_samples] = keyword[None]
):
literal[string]
identifier[record_checks] = identifier[record_checks] keyword[or] []
keyword[if] identifier[tabix_path] keyword[and] keyword[not] identifier[path] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[Reader] (
identifier[stream] = identifier[stream] ,
identifier[path] = identifier[path] ,
identifier[tabix_path] = identifier[tabix_path] ,
identifier[record_checks] = identifier[record_checks] ,
identifier[parsed_samples] = identifier[parsed_samples] ,
) | def from_stream(klass, stream, path=None, tabix_path=None, record_checks=None, parsed_samples=None):
"""Create new :py:class:`Reader` from file
.. note::
If you use the ``parsed_samples`` feature and you write out
records then you must not change the ``FORMAT`` of the record.
:param stream: ``file``-like object to read from
:param path: optional string with path to store (for display only)
:param list record_checks: record checks to perform, can contain
'INFO' and 'FORMAT'
:param list parsed_samples: ``list`` of ``str`` values with names of
samples to parse call information for (for speedup); leave to
``None`` for ignoring
"""
record_checks = record_checks or []
if tabix_path and (not path):
raise ValueError('Must give path if tabix_path is given') # depends on [control=['if'], data=[]]
return Reader(stream=stream, path=path, tabix_path=tabix_path, record_checks=record_checks, parsed_samples=parsed_samples) |
def put(self, x, y, text, fg, bg):
"""
Puts a string at the desired coordinates using the provided colors.
:param x: X position
:param y: Y position
:param text: Text to write
:param fg: Foreground color number
:param bg: Background color number
"""
if x < self.width and y < self.height:
try:
self.screen.addstr(y, x, symbols.encode(text), self.pairs[fg, bg])
except curses.error:
# Ignore out of bounds error
pass | def function[put, parameter[self, x, y, text, fg, bg]]:
constant[
Puts a string at the desired coordinates using the provided colors.
:param x: X position
:param y: Y position
:param text: Text to write
:param fg: Foreground color number
:param bg: Background color number
]
if <ast.BoolOp object at 0x7da1b031eb30> begin[:]
<ast.Try object at 0x7da1b031c7f0> | keyword[def] identifier[put] ( identifier[self] , identifier[x] , identifier[y] , identifier[text] , identifier[fg] , identifier[bg] ):
literal[string]
keyword[if] identifier[x] < identifier[self] . identifier[width] keyword[and] identifier[y] < identifier[self] . identifier[height] :
keyword[try] :
identifier[self] . identifier[screen] . identifier[addstr] ( identifier[y] , identifier[x] , identifier[symbols] . identifier[encode] ( identifier[text] ), identifier[self] . identifier[pairs] [ identifier[fg] , identifier[bg] ])
keyword[except] identifier[curses] . identifier[error] :
keyword[pass] | def put(self, x, y, text, fg, bg):
"""
Puts a string at the desired coordinates using the provided colors.
:param x: X position
:param y: Y position
:param text: Text to write
:param fg: Foreground color number
:param bg: Background color number
"""
if x < self.width and y < self.height:
try:
self.screen.addstr(y, x, symbols.encode(text), self.pairs[fg, bg]) # depends on [control=['try'], data=[]]
except curses.error:
# Ignore out of bounds error
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] |
def deaccent(text):
"""
Remove accentuation from the given string. Input text is either a unicode string or utf8 encoded bytestring.
Return input string with accents removed, as unicode.
>>> deaccent("Šéf chomutovských komunistů dostal poštou bílý prášek")
u'Sef chomutovskych komunistu dostal postou bily prasek'
"""
if not isinstance(text, unicode):
# assume utf8 for byte strings, use default (strict) error handling
text = text.decode('utf8')
norm = unicodedata.normalize("NFD", text)
result = u('').join(ch for ch in norm if unicodedata.category(ch) != 'Mn')
return unicodedata.normalize("NFC", result) | def function[deaccent, parameter[text]]:
constant[
Remove accentuation from the given string. Input text is either a unicode string or utf8 encoded bytestring.
Return input string with accents removed, as unicode.
>>> deaccent("Šéf chomutovských komunistů dostal poštou bílý prášek")
u'Sef chomutovskych komunistu dostal postou bily prasek'
]
if <ast.UnaryOp object at 0x7da1b216dd20> begin[:]
variable[text] assign[=] call[name[text].decode, parameter[constant[utf8]]]
variable[norm] assign[=] call[name[unicodedata].normalize, parameter[constant[NFD], name[text]]]
variable[result] assign[=] call[call[name[u], parameter[constant[]]].join, parameter[<ast.GeneratorExp object at 0x7da1b216d180>]]
return[call[name[unicodedata].normalize, parameter[constant[NFC], name[result]]]] | keyword[def] identifier[deaccent] ( identifier[text] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[text] , identifier[unicode] ):
identifier[text] = identifier[text] . identifier[decode] ( literal[string] )
identifier[norm] = identifier[unicodedata] . identifier[normalize] ( literal[string] , identifier[text] )
identifier[result] = identifier[u] ( literal[string] ). identifier[join] ( identifier[ch] keyword[for] identifier[ch] keyword[in] identifier[norm] keyword[if] identifier[unicodedata] . identifier[category] ( identifier[ch] )!= literal[string] )
keyword[return] identifier[unicodedata] . identifier[normalize] ( literal[string] , identifier[result] ) | def deaccent(text):
"""
Remove accentuation from the given string. Input text is either a unicode string or utf8 encoded bytestring.
Return input string with accents removed, as unicode.
>>> deaccent("Šéf chomutovských komunistů dostal poštou bílý prášek")
u'Sef chomutovskych komunistu dostal postou bily prasek'
"""
if not isinstance(text, unicode):
# assume utf8 for byte strings, use default (strict) error handling
text = text.decode('utf8') # depends on [control=['if'], data=[]]
norm = unicodedata.normalize('NFD', text)
result = u('').join((ch for ch in norm if unicodedata.category(ch) != 'Mn'))
return unicodedata.normalize('NFC', result) |
def get_sqla_coltype_from_dialect_str(coltype: str,
dialect: Dialect) -> TypeEngine:
"""
Returns an SQLAlchemy column type, given a column type name (a string) and
an SQLAlchemy dialect. For example, this might convert the string
``INTEGER(11)`` to an SQLAlchemy ``Integer(length=11)``.
Args:
dialect: a SQLAlchemy :class:`Dialect` class
coltype: a ``str()`` representation, e.g. from ``str(c['type'])`` where
``c`` is an instance of :class:`sqlalchemy.sql.schema.Column`.
Returns:
a Python object that is a subclass of
:class:`sqlalchemy.types.TypeEngine`
Example:
.. code-block:: python
get_sqla_coltype_from_string('INTEGER(11)', engine.dialect)
# gives: Integer(length=11)
Notes:
- :class:`sqlalchemy.engine.default.DefaultDialect` is the dialect base
class
- a dialect contains these things of interest:
- ``ischema_names``: string-to-class dictionary
- ``type_compiler``: instance of e.g.
:class:`sqlalchemy.sql.compiler.GenericTypeCompiler`. This has a
``process()`` method, but that operates on :class:`TypeEngine` objects.
- ``get_columns``: takes a table name, inspects the database
- example of the dangers of ``eval``:
http://nedbatchelder.com/blog/201206/eval_really_is_dangerous.html
- An example of a function doing the reflection/inspection within
SQLAlchemy is
:func:`sqlalchemy.dialects.mssql.base.MSDialect.get_columns`,
which has this lookup: ``coltype = self.ischema_names.get(type, None)``
Caveats:
- the parameters, e.g. ``DATETIME(6)``, do NOT necessarily either work at
all or work correctly. For example, SQLAlchemy will happily spit out
``'INTEGER(11)'`` but its :class:`sqlalchemy.sql.sqltypes.INTEGER` class
takes no parameters, so you get the error ``TypeError: object() takes no
parameters``. Similarly, MySQL's ``DATETIME(6)`` uses the 6 to refer to
precision, but the ``DATETIME`` class in SQLAlchemy takes only a boolean
parameter (timezone).
- However, sometimes we have to have parameters, e.g. ``VARCHAR`` length.
- Thus, this is a bit useless.
- Fixed, with a few special cases.
"""
size = None # type: Optional[int]
dp = None # type: Optional[int]
args = [] # type: List[Any]
kwargs = {} # type: Dict[str, Any]
basetype = ''
# noinspection PyPep8,PyBroadException
try:
# Split e.g. "VARCHAR(32) COLLATE blah" into "VARCHAR(32)", "who cares"
m = RE_COLTYPE_WITH_COLLATE.match(coltype)
if m is not None:
coltype = m.group('maintype')
found = False
if not found:
# Deal with ENUM('a', 'b', 'c', ...)
m = RE_MYSQL_ENUM_COLTYPE.match(coltype)
if m is not None:
# Convert to VARCHAR with max size being that of largest enum
basetype = 'VARCHAR'
values = get_list_of_sql_string_literals_from_quoted_csv(
m.group('valuelist'))
length = max(len(x) for x in values)
kwargs = {'length': length}
found = True
if not found:
# Split e.g. "DECIMAL(10, 2)" into DECIMAL, 10, 2
m = RE_COLTYPE_WITH_TWO_PARAMS.match(coltype)
if m is not None:
basetype = m.group('type').upper()
size = ast.literal_eval(m.group('size'))
dp = ast.literal_eval(m.group('dp'))
found = True
if not found:
# Split e.g. "VARCHAR(32)" into VARCHAR, 32
m = RE_COLTYPE_WITH_ONE_PARAM.match(coltype)
if m is not None:
basetype = m.group('type').upper()
size_text = m.group('size').strip().upper()
if size_text != 'MAX':
size = ast.literal_eval(size_text)
found = True
if not found:
basetype = coltype.upper()
# Special cases: pre-processing
# noinspection PyUnresolvedReferences
if (dialect.name == SqlaDialectName.MSSQL and
basetype.lower() == 'integer'):
basetype = 'int'
cls = _get_sqla_coltype_class_from_str(basetype, dialect)
# Special cases: post-processing
if basetype == 'DATETIME' and size:
# First argument to DATETIME() is timezone, so...
# noinspection PyUnresolvedReferences
if dialect.name == SqlaDialectName.MYSQL:
kwargs = {'fsp': size}
else:
pass
else:
args = [x for x in (size, dp) if x is not None]
try:
return cls(*args, **kwargs)
except TypeError:
return cls()
except:
# noinspection PyUnresolvedReferences
raise ValueError("Failed to convert SQL type {} in dialect {} to an "
"SQLAlchemy type".format(repr(coltype),
repr(dialect.name))) | def function[get_sqla_coltype_from_dialect_str, parameter[coltype, dialect]]:
constant[
Returns an SQLAlchemy column type, given a column type name (a string) and
an SQLAlchemy dialect. For example, this might convert the string
``INTEGER(11)`` to an SQLAlchemy ``Integer(length=11)``.
Args:
dialect: a SQLAlchemy :class:`Dialect` class
coltype: a ``str()`` representation, e.g. from ``str(c['type'])`` where
``c`` is an instance of :class:`sqlalchemy.sql.schema.Column`.
Returns:
a Python object that is a subclass of
:class:`sqlalchemy.types.TypeEngine`
Example:
.. code-block:: python
get_sqla_coltype_from_string('INTEGER(11)', engine.dialect)
# gives: Integer(length=11)
Notes:
- :class:`sqlalchemy.engine.default.DefaultDialect` is the dialect base
class
- a dialect contains these things of interest:
- ``ischema_names``: string-to-class dictionary
- ``type_compiler``: instance of e.g.
:class:`sqlalchemy.sql.compiler.GenericTypeCompiler`. This has a
``process()`` method, but that operates on :class:`TypeEngine` objects.
- ``get_columns``: takes a table name, inspects the database
- example of the dangers of ``eval``:
http://nedbatchelder.com/blog/201206/eval_really_is_dangerous.html
- An example of a function doing the reflection/inspection within
SQLAlchemy is
:func:`sqlalchemy.dialects.mssql.base.MSDialect.get_columns`,
which has this lookup: ``coltype = self.ischema_names.get(type, None)``
Caveats:
- the parameters, e.g. ``DATETIME(6)``, do NOT necessarily either work at
all or work correctly. For example, SQLAlchemy will happily spit out
``'INTEGER(11)'`` but its :class:`sqlalchemy.sql.sqltypes.INTEGER` class
takes no parameters, so you get the error ``TypeError: object() takes no
parameters``. Similarly, MySQL's ``DATETIME(6)`` uses the 6 to refer to
precision, but the ``DATETIME`` class in SQLAlchemy takes only a boolean
parameter (timezone).
- However, sometimes we have to have parameters, e.g. ``VARCHAR`` length.
- Thus, this is a bit useless.
- Fixed, with a few special cases.
]
variable[size] assign[=] constant[None]
variable[dp] assign[=] constant[None]
variable[args] assign[=] list[[]]
variable[kwargs] assign[=] dictionary[[], []]
variable[basetype] assign[=] constant[]
<ast.Try object at 0x7da1b189e770> | keyword[def] identifier[get_sqla_coltype_from_dialect_str] ( identifier[coltype] : identifier[str] ,
identifier[dialect] : identifier[Dialect] )-> identifier[TypeEngine] :
literal[string]
identifier[size] = keyword[None]
identifier[dp] = keyword[None]
identifier[args] =[]
identifier[kwargs] ={}
identifier[basetype] = literal[string]
keyword[try] :
identifier[m] = identifier[RE_COLTYPE_WITH_COLLATE] . identifier[match] ( identifier[coltype] )
keyword[if] identifier[m] keyword[is] keyword[not] keyword[None] :
identifier[coltype] = identifier[m] . identifier[group] ( literal[string] )
identifier[found] = keyword[False]
keyword[if] keyword[not] identifier[found] :
identifier[m] = identifier[RE_MYSQL_ENUM_COLTYPE] . identifier[match] ( identifier[coltype] )
keyword[if] identifier[m] keyword[is] keyword[not] keyword[None] :
identifier[basetype] = literal[string]
identifier[values] = identifier[get_list_of_sql_string_literals_from_quoted_csv] (
identifier[m] . identifier[group] ( literal[string] ))
identifier[length] = identifier[max] ( identifier[len] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[values] )
identifier[kwargs] ={ literal[string] : identifier[length] }
identifier[found] = keyword[True]
keyword[if] keyword[not] identifier[found] :
identifier[m] = identifier[RE_COLTYPE_WITH_TWO_PARAMS] . identifier[match] ( identifier[coltype] )
keyword[if] identifier[m] keyword[is] keyword[not] keyword[None] :
identifier[basetype] = identifier[m] . identifier[group] ( literal[string] ). identifier[upper] ()
identifier[size] = identifier[ast] . identifier[literal_eval] ( identifier[m] . identifier[group] ( literal[string] ))
identifier[dp] = identifier[ast] . identifier[literal_eval] ( identifier[m] . identifier[group] ( literal[string] ))
identifier[found] = keyword[True]
keyword[if] keyword[not] identifier[found] :
identifier[m] = identifier[RE_COLTYPE_WITH_ONE_PARAM] . identifier[match] ( identifier[coltype] )
keyword[if] identifier[m] keyword[is] keyword[not] keyword[None] :
identifier[basetype] = identifier[m] . identifier[group] ( literal[string] ). identifier[upper] ()
identifier[size_text] = identifier[m] . identifier[group] ( literal[string] ). identifier[strip] (). identifier[upper] ()
keyword[if] identifier[size_text] != literal[string] :
identifier[size] = identifier[ast] . identifier[literal_eval] ( identifier[size_text] )
identifier[found] = keyword[True]
keyword[if] keyword[not] identifier[found] :
identifier[basetype] = identifier[coltype] . identifier[upper] ()
keyword[if] ( identifier[dialect] . identifier[name] == identifier[SqlaDialectName] . identifier[MSSQL] keyword[and]
identifier[basetype] . identifier[lower] ()== literal[string] ):
identifier[basetype] = literal[string]
identifier[cls] = identifier[_get_sqla_coltype_class_from_str] ( identifier[basetype] , identifier[dialect] )
keyword[if] identifier[basetype] == literal[string] keyword[and] identifier[size] :
keyword[if] identifier[dialect] . identifier[name] == identifier[SqlaDialectName] . identifier[MYSQL] :
identifier[kwargs] ={ literal[string] : identifier[size] }
keyword[else] :
keyword[pass]
keyword[else] :
identifier[args] =[ identifier[x] keyword[for] identifier[x] keyword[in] ( identifier[size] , identifier[dp] ) keyword[if] identifier[x] keyword[is] keyword[not] keyword[None] ]
keyword[try] :
keyword[return] identifier[cls] (* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[TypeError] :
keyword[return] identifier[cls] ()
keyword[except] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[repr] ( identifier[coltype] ),
identifier[repr] ( identifier[dialect] . identifier[name] ))) | def get_sqla_coltype_from_dialect_str(coltype: str, dialect: Dialect) -> TypeEngine:
"""
Returns an SQLAlchemy column type, given a column type name (a string) and
an SQLAlchemy dialect. For example, this might convert the string
``INTEGER(11)`` to an SQLAlchemy ``Integer(length=11)``.
Args:
dialect: a SQLAlchemy :class:`Dialect` class
coltype: a ``str()`` representation, e.g. from ``str(c['type'])`` where
``c`` is an instance of :class:`sqlalchemy.sql.schema.Column`.
Returns:
a Python object that is a subclass of
:class:`sqlalchemy.types.TypeEngine`
Example:
.. code-block:: python
get_sqla_coltype_from_string('INTEGER(11)', engine.dialect)
# gives: Integer(length=11)
Notes:
- :class:`sqlalchemy.engine.default.DefaultDialect` is the dialect base
class
- a dialect contains these things of interest:
- ``ischema_names``: string-to-class dictionary
- ``type_compiler``: instance of e.g.
:class:`sqlalchemy.sql.compiler.GenericTypeCompiler`. This has a
``process()`` method, but that operates on :class:`TypeEngine` objects.
- ``get_columns``: takes a table name, inspects the database
- example of the dangers of ``eval``:
http://nedbatchelder.com/blog/201206/eval_really_is_dangerous.html
- An example of a function doing the reflection/inspection within
SQLAlchemy is
:func:`sqlalchemy.dialects.mssql.base.MSDialect.get_columns`,
which has this lookup: ``coltype = self.ischema_names.get(type, None)``
Caveats:
- the parameters, e.g. ``DATETIME(6)``, do NOT necessarily either work at
all or work correctly. For example, SQLAlchemy will happily spit out
``'INTEGER(11)'`` but its :class:`sqlalchemy.sql.sqltypes.INTEGER` class
takes no parameters, so you get the error ``TypeError: object() takes no
parameters``. Similarly, MySQL's ``DATETIME(6)`` uses the 6 to refer to
precision, but the ``DATETIME`` class in SQLAlchemy takes only a boolean
parameter (timezone).
- However, sometimes we have to have parameters, e.g. ``VARCHAR`` length.
- Thus, this is a bit useless.
- Fixed, with a few special cases.
"""
size = None # type: Optional[int]
dp = None # type: Optional[int]
args = [] # type: List[Any]
kwargs = {} # type: Dict[str, Any]
basetype = ''
# noinspection PyPep8,PyBroadException
try:
# Split e.g. "VARCHAR(32) COLLATE blah" into "VARCHAR(32)", "who cares"
m = RE_COLTYPE_WITH_COLLATE.match(coltype)
if m is not None:
coltype = m.group('maintype') # depends on [control=['if'], data=['m']]
found = False
if not found:
# Deal with ENUM('a', 'b', 'c', ...)
m = RE_MYSQL_ENUM_COLTYPE.match(coltype)
if m is not None:
# Convert to VARCHAR with max size being that of largest enum
basetype = 'VARCHAR'
values = get_list_of_sql_string_literals_from_quoted_csv(m.group('valuelist'))
length = max((len(x) for x in values))
kwargs = {'length': length}
found = True # depends on [control=['if'], data=['m']] # depends on [control=['if'], data=[]]
if not found:
# Split e.g. "DECIMAL(10, 2)" into DECIMAL, 10, 2
m = RE_COLTYPE_WITH_TWO_PARAMS.match(coltype)
if m is not None:
basetype = m.group('type').upper()
size = ast.literal_eval(m.group('size'))
dp = ast.literal_eval(m.group('dp'))
found = True # depends on [control=['if'], data=['m']] # depends on [control=['if'], data=[]]
if not found:
# Split e.g. "VARCHAR(32)" into VARCHAR, 32
m = RE_COLTYPE_WITH_ONE_PARAM.match(coltype)
if m is not None:
basetype = m.group('type').upper()
size_text = m.group('size').strip().upper()
if size_text != 'MAX':
size = ast.literal_eval(size_text) # depends on [control=['if'], data=['size_text']]
found = True # depends on [control=['if'], data=['m']] # depends on [control=['if'], data=[]]
if not found:
basetype = coltype.upper() # depends on [control=['if'], data=[]]
# Special cases: pre-processing
# noinspection PyUnresolvedReferences
if dialect.name == SqlaDialectName.MSSQL and basetype.lower() == 'integer':
basetype = 'int' # depends on [control=['if'], data=[]]
cls = _get_sqla_coltype_class_from_str(basetype, dialect)
# Special cases: post-processing
if basetype == 'DATETIME' and size:
# First argument to DATETIME() is timezone, so...
# noinspection PyUnresolvedReferences
if dialect.name == SqlaDialectName.MYSQL:
kwargs = {'fsp': size} # depends on [control=['if'], data=[]]
else:
pass # depends on [control=['if'], data=[]]
else:
args = [x for x in (size, dp) if x is not None]
try:
return cls(*args, **kwargs) # depends on [control=['try'], data=[]]
except TypeError:
return cls() # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]]
except:
# noinspection PyUnresolvedReferences
raise ValueError('Failed to convert SQL type {} in dialect {} to an SQLAlchemy type'.format(repr(coltype), repr(dialect.name))) # depends on [control=['except'], data=[]] |
def copy_byte_range(infile, outfile, start=None, stop=None, bufsize=16*1024):
'''Like shutil.copyfileobj, but only copy a range of the streams.
Both start and stop are inclusive.
'''
if start is not None: infile.seek(start)
while 1:
to_read = min(bufsize, stop + 1 - infile.tell() if stop else bufsize)
buf = infile.read(to_read)
if not buf:
break
outfile.write(buf) | def function[copy_byte_range, parameter[infile, outfile, start, stop, bufsize]]:
constant[Like shutil.copyfileobj, but only copy a range of the streams.
Both start and stop are inclusive.
]
if compare[name[start] is_not constant[None]] begin[:]
call[name[infile].seek, parameter[name[start]]]
while constant[1] begin[:]
variable[to_read] assign[=] call[name[min], parameter[name[bufsize], <ast.IfExp object at 0x7da1b06fd000>]]
variable[buf] assign[=] call[name[infile].read, parameter[name[to_read]]]
if <ast.UnaryOp object at 0x7da1b06fd1b0> begin[:]
break
call[name[outfile].write, parameter[name[buf]]] | keyword[def] identifier[copy_byte_range] ( identifier[infile] , identifier[outfile] , identifier[start] = keyword[None] , identifier[stop] = keyword[None] , identifier[bufsize] = literal[int] * literal[int] ):
literal[string]
keyword[if] identifier[start] keyword[is] keyword[not] keyword[None] : identifier[infile] . identifier[seek] ( identifier[start] )
keyword[while] literal[int] :
identifier[to_read] = identifier[min] ( identifier[bufsize] , identifier[stop] + literal[int] - identifier[infile] . identifier[tell] () keyword[if] identifier[stop] keyword[else] identifier[bufsize] )
identifier[buf] = identifier[infile] . identifier[read] ( identifier[to_read] )
keyword[if] keyword[not] identifier[buf] :
keyword[break]
identifier[outfile] . identifier[write] ( identifier[buf] ) | def copy_byte_range(infile, outfile, start=None, stop=None, bufsize=16 * 1024):
"""Like shutil.copyfileobj, but only copy a range of the streams.
Both start and stop are inclusive.
"""
if start is not None:
infile.seek(start) # depends on [control=['if'], data=['start']]
while 1:
to_read = min(bufsize, stop + 1 - infile.tell() if stop else bufsize)
buf = infile.read(to_read)
if not buf:
break # depends on [control=['if'], data=[]]
outfile.write(buf) # depends on [control=['while'], data=[]] |
def get_time_period(period_name):
"""
Given a time period name, fetch the hydra-compatible time
abbreviation.
"""
time_abbreviation = time_map.get(period_name.lower())
if time_abbreviation is None:
raise Exception("Symbol %s not recognised as a time period"%period_name)
return time_abbreviation | def function[get_time_period, parameter[period_name]]:
constant[
Given a time period name, fetch the hydra-compatible time
abbreviation.
]
variable[time_abbreviation] assign[=] call[name[time_map].get, parameter[call[name[period_name].lower, parameter[]]]]
if compare[name[time_abbreviation] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b1857ac0>
return[name[time_abbreviation]] | keyword[def] identifier[get_time_period] ( identifier[period_name] ):
literal[string]
identifier[time_abbreviation] = identifier[time_map] . identifier[get] ( identifier[period_name] . identifier[lower] ())
keyword[if] identifier[time_abbreviation] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[period_name] )
keyword[return] identifier[time_abbreviation] | def get_time_period(period_name):
"""
Given a time period name, fetch the hydra-compatible time
abbreviation.
"""
time_abbreviation = time_map.get(period_name.lower())
if time_abbreviation is None:
raise Exception('Symbol %s not recognised as a time period' % period_name) # depends on [control=['if'], data=[]]
return time_abbreviation |
def json_default(obj):
"""Convert an object to JSON, via the defaults set with register_json_default.
:obj: the object to convert
"""
for default in _JSON_DEFAULTS:
if default[0](obj):
return default[1](obj)
raise TypeError(repr(obj) + " is not JSON serializable") | def function[json_default, parameter[obj]]:
constant[Convert an object to JSON, via the defaults set with register_json_default.
:obj: the object to convert
]
for taget[name[default]] in starred[name[_JSON_DEFAULTS]] begin[:]
if call[call[name[default]][constant[0]], parameter[name[obj]]] begin[:]
return[call[call[name[default]][constant[1]], parameter[name[obj]]]]
<ast.Raise object at 0x7da18f00ff40> | keyword[def] identifier[json_default] ( identifier[obj] ):
literal[string]
keyword[for] identifier[default] keyword[in] identifier[_JSON_DEFAULTS] :
keyword[if] identifier[default] [ literal[int] ]( identifier[obj] ):
keyword[return] identifier[default] [ literal[int] ]( identifier[obj] )
keyword[raise] identifier[TypeError] ( identifier[repr] ( identifier[obj] )+ literal[string] ) | def json_default(obj):
"""Convert an object to JSON, via the defaults set with register_json_default.
:obj: the object to convert
"""
for default in _JSON_DEFAULTS:
if default[0](obj):
return default[1](obj) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['default']]
raise TypeError(repr(obj) + ' is not JSON serializable') |
def _configure_pool(kwargs):
"""Configures the pool and keeps the storage service"""
_pool_single_run.storage_service = kwargs['storage_service']
_configure_niceness(kwargs)
_configure_logging(kwargs, extract=False) | def function[_configure_pool, parameter[kwargs]]:
constant[Configures the pool and keeps the storage service]
name[_pool_single_run].storage_service assign[=] call[name[kwargs]][constant[storage_service]]
call[name[_configure_niceness], parameter[name[kwargs]]]
call[name[_configure_logging], parameter[name[kwargs]]] | keyword[def] identifier[_configure_pool] ( identifier[kwargs] ):
literal[string]
identifier[_pool_single_run] . identifier[storage_service] = identifier[kwargs] [ literal[string] ]
identifier[_configure_niceness] ( identifier[kwargs] )
identifier[_configure_logging] ( identifier[kwargs] , identifier[extract] = keyword[False] ) | def _configure_pool(kwargs):
"""Configures the pool and keeps the storage service"""
_pool_single_run.storage_service = kwargs['storage_service']
_configure_niceness(kwargs)
_configure_logging(kwargs, extract=False) |
def capacity_meyerhof_1963(sl, fd, gwl=1e6, h_l=0, h_b=0, vertical_load=1, verbose=0, **kwargs):
"""
Calculates the foundation capacity according Meyerhoff (1963)
http://www.engs-comp.com/meyerhof/index.shtml
:param sl: Soil object
:param fd: Foundation object
:param h_l: Horizontal load parallel to length
:param h_b: Horizontal load parallel to width
:param vertical_load: Vertical load
:param verbose: verbosity
:return: ultimate bearing stress
"""
if not kwargs.get("disable_requires", False):
models.check_required(sl, ["phi_r", "cohesion", "unit_dry_weight"])
models.check_required(fd, ["length", "width", "depth"])
horizontal_load = np.sqrt(h_l ** 2 + h_b ** 2)
fd.nq_factor = ((np.tan(np.pi / 4 + sl.phi_r / 2)) ** 2 *
np.exp(np.pi * np.tan(sl.phi_r)))
if sl.phi_r == 0:
fd.nc_factor = 5.14
else:
fd.nc_factor = (fd.nq_factor - 1) / np.tan(sl.phi_r)
fd.ng_factor = (fd.nq_factor - 1) * np.tan(1.4 * sl.phi_r)
if verbose:
log("Nc: ", fd.nc_factor)
log("Nq: ", fd.nq_factor)
log("Ng: ", fd.ng_factor)
kp = (np.tan(np.pi / 4 + sl.phi_r / 2)) ** 2
# shape factors
s_c = 1 + 0.2 * kp * fd.width / fd.length
if sl.phi > 10:
s_q = 1.0 + 0.1 * kp * fd.width / fd.length
else:
s_q = 1.0
s_g = s_q
# depth factors
d_c = 1 + 0.2 * np.sqrt(kp) * fd.depth / fd.width
if sl.phi > 10:
d_q = 1 + 0.1 * np.sqrt(kp) * fd.depth / fd.width
else:
d_q = 1.0
d_g = d_q
# inclination factors:
theta_load = np.arctan(horizontal_load / vertical_load)
i_c = (1 - theta_load / (np.pi * 0.5)) ** 2
i_q = i_c
if sl.phi > 0:
i_g = (1 - theta_load / sl.phi_r) ** 2
else:
i_g = 0
# stress at footing base:
if gwl == 0:
q_d = sl.unit_bouy_weight * fd.depth
unit_weight = sl.unit_bouy_weight
elif gwl > 0 and gwl < fd.depth:
q_d = (sl.unit_dry_weight * gwl) + (sl.unit_bouy_weight * (fd.depth - gwl))
unit_weight = sl.unit_bouy_weight
elif gwl >= fd.depth and gwl <= fd.depth + fd.width:
sl.average_unit_bouy_weight = sl.unit_bouy_weight + (
((gwl - fd.depth) / fd.width) * (sl.unit_dry_weight - sl.unit_bouy_weight))
q_d = sl.unit_dry_weight * fd.depth
unit_weight = sl.average_unit_bouy_weight
elif gwl > fd.depth + fd.width:
q_d = sl.unit_dry_weight * fd.depth
unit_weight = sl.unit_dry_weight
if verbose:
log("Nc: ", fd.nc_factor)
log("Nq: ", fd.nq_factor)
log("Ng: ", fd.ng_factor)
log("s_c: ", s_c)
log("s_q: ", s_q)
log("s_g: ", s_g)
log("d_c: ", d_c)
log("d_q: ", d_q)
log("d_g: ", d_g)
log("i_c: ", i_c)
log("i_q: ", i_q)
log("i_g: ", i_g)
log("q_d: ", q_d)
# Capacity
fd.q_ult = (sl.cohesion * fd.nc_factor * s_c * d_c * i_c +
q_d * fd.nq_factor * s_q * d_q * i_q +
0.5 * fd.width * unit_weight *
fd.ng_factor * s_g * d_g * i_g)
return fd.q_ult | def function[capacity_meyerhof_1963, parameter[sl, fd, gwl, h_l, h_b, vertical_load, verbose]]:
constant[
Calculates the foundation capacity according Meyerhoff (1963)
http://www.engs-comp.com/meyerhof/index.shtml
:param sl: Soil object
:param fd: Foundation object
:param h_l: Horizontal load parallel to length
:param h_b: Horizontal load parallel to width
:param vertical_load: Vertical load
:param verbose: verbosity
:return: ultimate bearing stress
]
if <ast.UnaryOp object at 0x7da1b033e410> begin[:]
call[name[models].check_required, parameter[name[sl], list[[<ast.Constant object at 0x7da1b033cdf0>, <ast.Constant object at 0x7da1b033e8f0>, <ast.Constant object at 0x7da1b033e7a0>]]]]
call[name[models].check_required, parameter[name[fd], list[[<ast.Constant object at 0x7da1b033e2f0>, <ast.Constant object at 0x7da1b033e0e0>, <ast.Constant object at 0x7da1b033ebf0>]]]]
variable[horizontal_load] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[name[h_l] ** constant[2]] + binary_operation[name[h_b] ** constant[2]]]]]
name[fd].nq_factor assign[=] binary_operation[binary_operation[call[name[np].tan, parameter[binary_operation[binary_operation[name[np].pi / constant[4]] + binary_operation[name[sl].phi_r / constant[2]]]]] ** constant[2]] * call[name[np].exp, parameter[binary_operation[name[np].pi * call[name[np].tan, parameter[name[sl].phi_r]]]]]]
if compare[name[sl].phi_r equal[==] constant[0]] begin[:]
name[fd].nc_factor assign[=] constant[5.14]
name[fd].ng_factor assign[=] binary_operation[binary_operation[name[fd].nq_factor - constant[1]] * call[name[np].tan, parameter[binary_operation[constant[1.4] * name[sl].phi_r]]]]
if name[verbose] begin[:]
call[name[log], parameter[constant[Nc: ], name[fd].nc_factor]]
call[name[log], parameter[constant[Nq: ], name[fd].nq_factor]]
call[name[log], parameter[constant[Ng: ], name[fd].ng_factor]]
variable[kp] assign[=] binary_operation[call[name[np].tan, parameter[binary_operation[binary_operation[name[np].pi / constant[4]] + binary_operation[name[sl].phi_r / constant[2]]]]] ** constant[2]]
variable[s_c] assign[=] binary_operation[constant[1] + binary_operation[binary_operation[binary_operation[constant[0.2] * name[kp]] * name[fd].width] / name[fd].length]]
if compare[name[sl].phi greater[>] constant[10]] begin[:]
variable[s_q] assign[=] binary_operation[constant[1.0] + binary_operation[binary_operation[binary_operation[constant[0.1] * name[kp]] * name[fd].width] / name[fd].length]]
variable[s_g] assign[=] name[s_q]
variable[d_c] assign[=] binary_operation[constant[1] + binary_operation[binary_operation[binary_operation[constant[0.2] * call[name[np].sqrt, parameter[name[kp]]]] * name[fd].depth] / name[fd].width]]
if compare[name[sl].phi greater[>] constant[10]] begin[:]
variable[d_q] assign[=] binary_operation[constant[1] + binary_operation[binary_operation[binary_operation[constant[0.1] * call[name[np].sqrt, parameter[name[kp]]]] * name[fd].depth] / name[fd].width]]
variable[d_g] assign[=] name[d_q]
variable[theta_load] assign[=] call[name[np].arctan, parameter[binary_operation[name[horizontal_load] / name[vertical_load]]]]
variable[i_c] assign[=] binary_operation[binary_operation[constant[1] - binary_operation[name[theta_load] / binary_operation[name[np].pi * constant[0.5]]]] ** constant[2]]
variable[i_q] assign[=] name[i_c]
if compare[name[sl].phi greater[>] constant[0]] begin[:]
variable[i_g] assign[=] binary_operation[binary_operation[constant[1] - binary_operation[name[theta_load] / name[sl].phi_r]] ** constant[2]]
if compare[name[gwl] equal[==] constant[0]] begin[:]
variable[q_d] assign[=] binary_operation[name[sl].unit_bouy_weight * name[fd].depth]
variable[unit_weight] assign[=] name[sl].unit_bouy_weight
if name[verbose] begin[:]
call[name[log], parameter[constant[Nc: ], name[fd].nc_factor]]
call[name[log], parameter[constant[Nq: ], name[fd].nq_factor]]
call[name[log], parameter[constant[Ng: ], name[fd].ng_factor]]
call[name[log], parameter[constant[s_c: ], name[s_c]]]
call[name[log], parameter[constant[s_q: ], name[s_q]]]
call[name[log], parameter[constant[s_g: ], name[s_g]]]
call[name[log], parameter[constant[d_c: ], name[d_c]]]
call[name[log], parameter[constant[d_q: ], name[d_q]]]
call[name[log], parameter[constant[d_g: ], name[d_g]]]
call[name[log], parameter[constant[i_c: ], name[i_c]]]
call[name[log], parameter[constant[i_q: ], name[i_q]]]
call[name[log], parameter[constant[i_g: ], name[i_g]]]
call[name[log], parameter[constant[q_d: ], name[q_d]]]
name[fd].q_ult assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[sl].cohesion * name[fd].nc_factor] * name[s_c]] * name[d_c]] * name[i_c]] + binary_operation[binary_operation[binary_operation[binary_operation[name[q_d] * name[fd].nq_factor] * name[s_q]] * name[d_q]] * name[i_q]]] + binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[0.5] * name[fd].width] * name[unit_weight]] * name[fd].ng_factor] * name[s_g]] * name[d_g]] * name[i_g]]]
return[name[fd].q_ult] | keyword[def] identifier[capacity_meyerhof_1963] ( identifier[sl] , identifier[fd] , identifier[gwl] = literal[int] , identifier[h_l] = literal[int] , identifier[h_b] = literal[int] , identifier[vertical_load] = literal[int] , identifier[verbose] = literal[int] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ):
identifier[models] . identifier[check_required] ( identifier[sl] ,[ literal[string] , literal[string] , literal[string] ])
identifier[models] . identifier[check_required] ( identifier[fd] ,[ literal[string] , literal[string] , literal[string] ])
identifier[horizontal_load] = identifier[np] . identifier[sqrt] ( identifier[h_l] ** literal[int] + identifier[h_b] ** literal[int] )
identifier[fd] . identifier[nq_factor] =(( identifier[np] . identifier[tan] ( identifier[np] . identifier[pi] / literal[int] + identifier[sl] . identifier[phi_r] / literal[int] ))** literal[int] *
identifier[np] . identifier[exp] ( identifier[np] . identifier[pi] * identifier[np] . identifier[tan] ( identifier[sl] . identifier[phi_r] )))
keyword[if] identifier[sl] . identifier[phi_r] == literal[int] :
identifier[fd] . identifier[nc_factor] = literal[int]
keyword[else] :
identifier[fd] . identifier[nc_factor] =( identifier[fd] . identifier[nq_factor] - literal[int] )/ identifier[np] . identifier[tan] ( identifier[sl] . identifier[phi_r] )
identifier[fd] . identifier[ng_factor] =( identifier[fd] . identifier[nq_factor] - literal[int] )* identifier[np] . identifier[tan] ( literal[int] * identifier[sl] . identifier[phi_r] )
keyword[if] identifier[verbose] :
identifier[log] ( literal[string] , identifier[fd] . identifier[nc_factor] )
identifier[log] ( literal[string] , identifier[fd] . identifier[nq_factor] )
identifier[log] ( literal[string] , identifier[fd] . identifier[ng_factor] )
identifier[kp] =( identifier[np] . identifier[tan] ( identifier[np] . identifier[pi] / literal[int] + identifier[sl] . identifier[phi_r] / literal[int] ))** literal[int]
identifier[s_c] = literal[int] + literal[int] * identifier[kp] * identifier[fd] . identifier[width] / identifier[fd] . identifier[length]
keyword[if] identifier[sl] . identifier[phi] > literal[int] :
identifier[s_q] = literal[int] + literal[int] * identifier[kp] * identifier[fd] . identifier[width] / identifier[fd] . identifier[length]
keyword[else] :
identifier[s_q] = literal[int]
identifier[s_g] = identifier[s_q]
identifier[d_c] = literal[int] + literal[int] * identifier[np] . identifier[sqrt] ( identifier[kp] )* identifier[fd] . identifier[depth] / identifier[fd] . identifier[width]
keyword[if] identifier[sl] . identifier[phi] > literal[int] :
identifier[d_q] = literal[int] + literal[int] * identifier[np] . identifier[sqrt] ( identifier[kp] )* identifier[fd] . identifier[depth] / identifier[fd] . identifier[width]
keyword[else] :
identifier[d_q] = literal[int]
identifier[d_g] = identifier[d_q]
identifier[theta_load] = identifier[np] . identifier[arctan] ( identifier[horizontal_load] / identifier[vertical_load] )
identifier[i_c] =( literal[int] - identifier[theta_load] /( identifier[np] . identifier[pi] * literal[int] ))** literal[int]
identifier[i_q] = identifier[i_c]
keyword[if] identifier[sl] . identifier[phi] > literal[int] :
identifier[i_g] =( literal[int] - identifier[theta_load] / identifier[sl] . identifier[phi_r] )** literal[int]
keyword[else] :
identifier[i_g] = literal[int]
keyword[if] identifier[gwl] == literal[int] :
identifier[q_d] = identifier[sl] . identifier[unit_bouy_weight] * identifier[fd] . identifier[depth]
identifier[unit_weight] = identifier[sl] . identifier[unit_bouy_weight]
keyword[elif] identifier[gwl] > literal[int] keyword[and] identifier[gwl] < identifier[fd] . identifier[depth] :
identifier[q_d] =( identifier[sl] . identifier[unit_dry_weight] * identifier[gwl] )+( identifier[sl] . identifier[unit_bouy_weight] *( identifier[fd] . identifier[depth] - identifier[gwl] ))
identifier[unit_weight] = identifier[sl] . identifier[unit_bouy_weight]
keyword[elif] identifier[gwl] >= identifier[fd] . identifier[depth] keyword[and] identifier[gwl] <= identifier[fd] . identifier[depth] + identifier[fd] . identifier[width] :
identifier[sl] . identifier[average_unit_bouy_weight] = identifier[sl] . identifier[unit_bouy_weight] +(
(( identifier[gwl] - identifier[fd] . identifier[depth] )/ identifier[fd] . identifier[width] )*( identifier[sl] . identifier[unit_dry_weight] - identifier[sl] . identifier[unit_bouy_weight] ))
identifier[q_d] = identifier[sl] . identifier[unit_dry_weight] * identifier[fd] . identifier[depth]
identifier[unit_weight] = identifier[sl] . identifier[average_unit_bouy_weight]
keyword[elif] identifier[gwl] > identifier[fd] . identifier[depth] + identifier[fd] . identifier[width] :
identifier[q_d] = identifier[sl] . identifier[unit_dry_weight] * identifier[fd] . identifier[depth]
identifier[unit_weight] = identifier[sl] . identifier[unit_dry_weight]
keyword[if] identifier[verbose] :
identifier[log] ( literal[string] , identifier[fd] . identifier[nc_factor] )
identifier[log] ( literal[string] , identifier[fd] . identifier[nq_factor] )
identifier[log] ( literal[string] , identifier[fd] . identifier[ng_factor] )
identifier[log] ( literal[string] , identifier[s_c] )
identifier[log] ( literal[string] , identifier[s_q] )
identifier[log] ( literal[string] , identifier[s_g] )
identifier[log] ( literal[string] , identifier[d_c] )
identifier[log] ( literal[string] , identifier[d_q] )
identifier[log] ( literal[string] , identifier[d_g] )
identifier[log] ( literal[string] , identifier[i_c] )
identifier[log] ( literal[string] , identifier[i_q] )
identifier[log] ( literal[string] , identifier[i_g] )
identifier[log] ( literal[string] , identifier[q_d] )
identifier[fd] . identifier[q_ult] =( identifier[sl] . identifier[cohesion] * identifier[fd] . identifier[nc_factor] * identifier[s_c] * identifier[d_c] * identifier[i_c] +
identifier[q_d] * identifier[fd] . identifier[nq_factor] * identifier[s_q] * identifier[d_q] * identifier[i_q] +
literal[int] * identifier[fd] . identifier[width] * identifier[unit_weight] *
identifier[fd] . identifier[ng_factor] * identifier[s_g] * identifier[d_g] * identifier[i_g] )
keyword[return] identifier[fd] . identifier[q_ult] | def capacity_meyerhof_1963(sl, fd, gwl=1000000.0, h_l=0, h_b=0, vertical_load=1, verbose=0, **kwargs):
"""
Calculates the foundation capacity according Meyerhoff (1963)
http://www.engs-comp.com/meyerhof/index.shtml
:param sl: Soil object
:param fd: Foundation object
:param h_l: Horizontal load parallel to length
:param h_b: Horizontal load parallel to width
:param vertical_load: Vertical load
:param verbose: verbosity
:return: ultimate bearing stress
"""
if not kwargs.get('disable_requires', False):
models.check_required(sl, ['phi_r', 'cohesion', 'unit_dry_weight'])
models.check_required(fd, ['length', 'width', 'depth']) # depends on [control=['if'], data=[]]
horizontal_load = np.sqrt(h_l ** 2 + h_b ** 2)
fd.nq_factor = np.tan(np.pi / 4 + sl.phi_r / 2) ** 2 * np.exp(np.pi * np.tan(sl.phi_r))
if sl.phi_r == 0:
fd.nc_factor = 5.14 # depends on [control=['if'], data=[]]
else:
fd.nc_factor = (fd.nq_factor - 1) / np.tan(sl.phi_r)
fd.ng_factor = (fd.nq_factor - 1) * np.tan(1.4 * sl.phi_r)
if verbose:
log('Nc: ', fd.nc_factor)
log('Nq: ', fd.nq_factor)
log('Ng: ', fd.ng_factor) # depends on [control=['if'], data=[]]
kp = np.tan(np.pi / 4 + sl.phi_r / 2) ** 2
# shape factors
s_c = 1 + 0.2 * kp * fd.width / fd.length
if sl.phi > 10:
s_q = 1.0 + 0.1 * kp * fd.width / fd.length # depends on [control=['if'], data=[]]
else:
s_q = 1.0
s_g = s_q
# depth factors
d_c = 1 + 0.2 * np.sqrt(kp) * fd.depth / fd.width
if sl.phi > 10:
d_q = 1 + 0.1 * np.sqrt(kp) * fd.depth / fd.width # depends on [control=['if'], data=[]]
else:
d_q = 1.0
d_g = d_q
# inclination factors:
theta_load = np.arctan(horizontal_load / vertical_load)
i_c = (1 - theta_load / (np.pi * 0.5)) ** 2
i_q = i_c
if sl.phi > 0:
i_g = (1 - theta_load / sl.phi_r) ** 2 # depends on [control=['if'], data=[]]
else:
i_g = 0
# stress at footing base:
if gwl == 0:
q_d = sl.unit_bouy_weight * fd.depth
unit_weight = sl.unit_bouy_weight # depends on [control=['if'], data=[]]
elif gwl > 0 and gwl < fd.depth:
q_d = sl.unit_dry_weight * gwl + sl.unit_bouy_weight * (fd.depth - gwl)
unit_weight = sl.unit_bouy_weight # depends on [control=['if'], data=[]]
elif gwl >= fd.depth and gwl <= fd.depth + fd.width:
sl.average_unit_bouy_weight = sl.unit_bouy_weight + (gwl - fd.depth) / fd.width * (sl.unit_dry_weight - sl.unit_bouy_weight)
q_d = sl.unit_dry_weight * fd.depth
unit_weight = sl.average_unit_bouy_weight # depends on [control=['if'], data=[]]
elif gwl > fd.depth + fd.width:
q_d = sl.unit_dry_weight * fd.depth
unit_weight = sl.unit_dry_weight # depends on [control=['if'], data=[]]
if verbose:
log('Nc: ', fd.nc_factor)
log('Nq: ', fd.nq_factor)
log('Ng: ', fd.ng_factor)
log('s_c: ', s_c)
log('s_q: ', s_q)
log('s_g: ', s_g)
log('d_c: ', d_c)
log('d_q: ', d_q)
log('d_g: ', d_g)
log('i_c: ', i_c)
log('i_q: ', i_q)
log('i_g: ', i_g)
log('q_d: ', q_d) # depends on [control=['if'], data=[]]
# Capacity
fd.q_ult = sl.cohesion * fd.nc_factor * s_c * d_c * i_c + q_d * fd.nq_factor * s_q * d_q * i_q + 0.5 * fd.width * unit_weight * fd.ng_factor * s_g * d_g * i_g
return fd.q_ult |
def _read_embeddings_from_text_file(file_uri: str,
embedding_dim: int,
vocab: Vocabulary,
namespace: str = "tokens") -> torch.FloatTensor:
"""
Read pre-trained word vectors from an eventually compressed text file, possibly contained
inside an archive with multiple files. The text file is assumed to be utf-8 encoded with
space-separated fields: [word] [dim 1] [dim 2] ...
Lines that contain more numerical tokens than ``embedding_dim`` raise a warning and are skipped.
The remainder of the docstring is identical to ``_read_pretrained_embeddings_file``.
"""
tokens_to_keep = set(vocab.get_index_to_token_vocabulary(namespace).values())
vocab_size = vocab.get_vocab_size(namespace)
embeddings = {}
# First we read the embeddings from the file, only keeping vectors for the words we need.
logger.info("Reading pretrained embeddings from file")
with EmbeddingsTextFile(file_uri) as embeddings_file:
for line in Tqdm.tqdm(embeddings_file):
token = line.split(' ', 1)[0]
if token in tokens_to_keep:
fields = line.rstrip().split(' ')
if len(fields) - 1 != embedding_dim:
# Sometimes there are funny unicode parsing problems that lead to different
# fields lengths (e.g., a word with a unicode space character that splits
# into more than one column). We skip those lines. Note that if you have
# some kind of long header, this could result in all of your lines getting
# skipped. It's hard to check for that here; you just have to look in the
# embedding_misses_file and at the model summary to make sure things look
# like they are supposed to.
logger.warning("Found line with wrong number of dimensions (expected: %d; actual: %d): %s",
embedding_dim, len(fields) - 1, line)
continue
vector = numpy.asarray(fields[1:], dtype='float32')
embeddings[token] = vector
if not embeddings:
raise ConfigurationError("No embeddings of correct dimension found; you probably "
"misspecified your embedding_dim parameter, or didn't "
"pre-populate your Vocabulary")
all_embeddings = numpy.asarray(list(embeddings.values()))
embeddings_mean = float(numpy.mean(all_embeddings))
embeddings_std = float(numpy.std(all_embeddings))
# Now we initialize the weight matrix for an embedding layer, starting with random vectors,
# then filling in the word vectors we just read.
logger.info("Initializing pre-trained embedding layer")
embedding_matrix = torch.FloatTensor(vocab_size, embedding_dim).normal_(embeddings_mean,
embeddings_std)
num_tokens_found = 0
index_to_token = vocab.get_index_to_token_vocabulary(namespace)
for i in range(vocab_size):
token = index_to_token[i]
# If we don't have a pre-trained vector for this word, we'll just leave this row alone,
# so the word has a random initialization.
if token in embeddings:
embedding_matrix[i] = torch.FloatTensor(embeddings[token])
num_tokens_found += 1
else:
logger.debug("Token %s was not found in the embedding file. Initialising randomly.", token)
logger.info("Pretrained embeddings were found for %d out of %d tokens",
num_tokens_found, vocab_size)
return embedding_matrix | def function[_read_embeddings_from_text_file, parameter[file_uri, embedding_dim, vocab, namespace]]:
constant[
Read pre-trained word vectors from an eventually compressed text file, possibly contained
inside an archive with multiple files. The text file is assumed to be utf-8 encoded with
space-separated fields: [word] [dim 1] [dim 2] ...
Lines that contain more numerical tokens than ``embedding_dim`` raise a warning and are skipped.
The remainder of the docstring is identical to ``_read_pretrained_embeddings_file``.
]
variable[tokens_to_keep] assign[=] call[name[set], parameter[call[call[name[vocab].get_index_to_token_vocabulary, parameter[name[namespace]]].values, parameter[]]]]
variable[vocab_size] assign[=] call[name[vocab].get_vocab_size, parameter[name[namespace]]]
variable[embeddings] assign[=] dictionary[[], []]
call[name[logger].info, parameter[constant[Reading pretrained embeddings from file]]]
with call[name[EmbeddingsTextFile], parameter[name[file_uri]]] begin[:]
for taget[name[line]] in starred[call[name[Tqdm].tqdm, parameter[name[embeddings_file]]]] begin[:]
variable[token] assign[=] call[call[name[line].split, parameter[constant[ ], constant[1]]]][constant[0]]
if compare[name[token] in name[tokens_to_keep]] begin[:]
variable[fields] assign[=] call[call[name[line].rstrip, parameter[]].split, parameter[constant[ ]]]
if compare[binary_operation[call[name[len], parameter[name[fields]]] - constant[1]] not_equal[!=] name[embedding_dim]] begin[:]
call[name[logger].warning, parameter[constant[Found line with wrong number of dimensions (expected: %d; actual: %d): %s], name[embedding_dim], binary_operation[call[name[len], parameter[name[fields]]] - constant[1]], name[line]]]
continue
variable[vector] assign[=] call[name[numpy].asarray, parameter[call[name[fields]][<ast.Slice object at 0x7da20c992ec0>]]]
call[name[embeddings]][name[token]] assign[=] name[vector]
if <ast.UnaryOp object at 0x7da20c991d80> begin[:]
<ast.Raise object at 0x7da20c9925c0>
variable[all_embeddings] assign[=] call[name[numpy].asarray, parameter[call[name[list], parameter[call[name[embeddings].values, parameter[]]]]]]
variable[embeddings_mean] assign[=] call[name[float], parameter[call[name[numpy].mean, parameter[name[all_embeddings]]]]]
variable[embeddings_std] assign[=] call[name[float], parameter[call[name[numpy].std, parameter[name[all_embeddings]]]]]
call[name[logger].info, parameter[constant[Initializing pre-trained embedding layer]]]
variable[embedding_matrix] assign[=] call[call[name[torch].FloatTensor, parameter[name[vocab_size], name[embedding_dim]]].normal_, parameter[name[embeddings_mean], name[embeddings_std]]]
variable[num_tokens_found] assign[=] constant[0]
variable[index_to_token] assign[=] call[name[vocab].get_index_to_token_vocabulary, parameter[name[namespace]]]
for taget[name[i]] in starred[call[name[range], parameter[name[vocab_size]]]] begin[:]
variable[token] assign[=] call[name[index_to_token]][name[i]]
if compare[name[token] in name[embeddings]] begin[:]
call[name[embedding_matrix]][name[i]] assign[=] call[name[torch].FloatTensor, parameter[call[name[embeddings]][name[token]]]]
<ast.AugAssign object at 0x7da20c991ab0>
call[name[logger].info, parameter[constant[Pretrained embeddings were found for %d out of %d tokens], name[num_tokens_found], name[vocab_size]]]
return[name[embedding_matrix]] | keyword[def] identifier[_read_embeddings_from_text_file] ( identifier[file_uri] : identifier[str] ,
identifier[embedding_dim] : identifier[int] ,
identifier[vocab] : identifier[Vocabulary] ,
identifier[namespace] : identifier[str] = literal[string] )-> identifier[torch] . identifier[FloatTensor] :
literal[string]
identifier[tokens_to_keep] = identifier[set] ( identifier[vocab] . identifier[get_index_to_token_vocabulary] ( identifier[namespace] ). identifier[values] ())
identifier[vocab_size] = identifier[vocab] . identifier[get_vocab_size] ( identifier[namespace] )
identifier[embeddings] ={}
identifier[logger] . identifier[info] ( literal[string] )
keyword[with] identifier[EmbeddingsTextFile] ( identifier[file_uri] ) keyword[as] identifier[embeddings_file] :
keyword[for] identifier[line] keyword[in] identifier[Tqdm] . identifier[tqdm] ( identifier[embeddings_file] ):
identifier[token] = identifier[line] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]
keyword[if] identifier[token] keyword[in] identifier[tokens_to_keep] :
identifier[fields] = identifier[line] . identifier[rstrip] (). identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[fields] )- literal[int] != identifier[embedding_dim] :
identifier[logger] . identifier[warning] ( literal[string] ,
identifier[embedding_dim] , identifier[len] ( identifier[fields] )- literal[int] , identifier[line] )
keyword[continue]
identifier[vector] = identifier[numpy] . identifier[asarray] ( identifier[fields] [ literal[int] :], identifier[dtype] = literal[string] )
identifier[embeddings] [ identifier[token] ]= identifier[vector]
keyword[if] keyword[not] identifier[embeddings] :
keyword[raise] identifier[ConfigurationError] ( literal[string]
literal[string]
literal[string] )
identifier[all_embeddings] = identifier[numpy] . identifier[asarray] ( identifier[list] ( identifier[embeddings] . identifier[values] ()))
identifier[embeddings_mean] = identifier[float] ( identifier[numpy] . identifier[mean] ( identifier[all_embeddings] ))
identifier[embeddings_std] = identifier[float] ( identifier[numpy] . identifier[std] ( identifier[all_embeddings] ))
identifier[logger] . identifier[info] ( literal[string] )
identifier[embedding_matrix] = identifier[torch] . identifier[FloatTensor] ( identifier[vocab_size] , identifier[embedding_dim] ). identifier[normal_] ( identifier[embeddings_mean] ,
identifier[embeddings_std] )
identifier[num_tokens_found] = literal[int]
identifier[index_to_token] = identifier[vocab] . identifier[get_index_to_token_vocabulary] ( identifier[namespace] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[vocab_size] ):
identifier[token] = identifier[index_to_token] [ identifier[i] ]
keyword[if] identifier[token] keyword[in] identifier[embeddings] :
identifier[embedding_matrix] [ identifier[i] ]= identifier[torch] . identifier[FloatTensor] ( identifier[embeddings] [ identifier[token] ])
identifier[num_tokens_found] += literal[int]
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[token] )
identifier[logger] . identifier[info] ( literal[string] ,
identifier[num_tokens_found] , identifier[vocab_size] )
keyword[return] identifier[embedding_matrix] | def _read_embeddings_from_text_file(file_uri: str, embedding_dim: int, vocab: Vocabulary, namespace: str='tokens') -> torch.FloatTensor:
"""
Read pre-trained word vectors from an eventually compressed text file, possibly contained
inside an archive with multiple files. The text file is assumed to be utf-8 encoded with
space-separated fields: [word] [dim 1] [dim 2] ...
Lines that contain more numerical tokens than ``embedding_dim`` raise a warning and are skipped.
The remainder of the docstring is identical to ``_read_pretrained_embeddings_file``.
"""
tokens_to_keep = set(vocab.get_index_to_token_vocabulary(namespace).values())
vocab_size = vocab.get_vocab_size(namespace)
embeddings = {}
# First we read the embeddings from the file, only keeping vectors for the words we need.
logger.info('Reading pretrained embeddings from file')
with EmbeddingsTextFile(file_uri) as embeddings_file:
for line in Tqdm.tqdm(embeddings_file):
token = line.split(' ', 1)[0]
if token in tokens_to_keep:
fields = line.rstrip().split(' ')
if len(fields) - 1 != embedding_dim:
# Sometimes there are funny unicode parsing problems that lead to different
# fields lengths (e.g., a word with a unicode space character that splits
# into more than one column). We skip those lines. Note that if you have
# some kind of long header, this could result in all of your lines getting
# skipped. It's hard to check for that here; you just have to look in the
# embedding_misses_file and at the model summary to make sure things look
# like they are supposed to.
logger.warning('Found line with wrong number of dimensions (expected: %d; actual: %d): %s', embedding_dim, len(fields) - 1, line)
continue # depends on [control=['if'], data=['embedding_dim']]
vector = numpy.asarray(fields[1:], dtype='float32')
embeddings[token] = vector # depends on [control=['if'], data=['token']] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['embeddings_file']]
if not embeddings:
raise ConfigurationError("No embeddings of correct dimension found; you probably misspecified your embedding_dim parameter, or didn't pre-populate your Vocabulary") # depends on [control=['if'], data=[]]
all_embeddings = numpy.asarray(list(embeddings.values()))
embeddings_mean = float(numpy.mean(all_embeddings))
embeddings_std = float(numpy.std(all_embeddings))
# Now we initialize the weight matrix for an embedding layer, starting with random vectors,
# then filling in the word vectors we just read.
logger.info('Initializing pre-trained embedding layer')
embedding_matrix = torch.FloatTensor(vocab_size, embedding_dim).normal_(embeddings_mean, embeddings_std)
num_tokens_found = 0
index_to_token = vocab.get_index_to_token_vocabulary(namespace)
for i in range(vocab_size):
token = index_to_token[i]
# If we don't have a pre-trained vector for this word, we'll just leave this row alone,
# so the word has a random initialization.
if token in embeddings:
embedding_matrix[i] = torch.FloatTensor(embeddings[token])
num_tokens_found += 1 # depends on [control=['if'], data=['token', 'embeddings']]
else:
logger.debug('Token %s was not found in the embedding file. Initialising randomly.', token) # depends on [control=['for'], data=['i']]
logger.info('Pretrained embeddings were found for %d out of %d tokens', num_tokens_found, vocab_size)
return embedding_matrix |
def file_transfer_protocol_encode(self, target_network, target_system, target_component, payload):
'''
File transfer message
target_network : Network ID (0 for broadcast) (uint8_t)
target_system : System ID (0 for broadcast) (uint8_t)
target_component : Component ID (0 for broadcast) (uint8_t)
payload : Variable length payload. The length is defined by the remaining message length when subtracting the header and other fields. The entire content of this block is opaque unless you understand any the encoding message_type. The particular encoding used can be extension specific and might not always be documented as part of the mavlink specification. (uint8_t)
'''
return MAVLink_file_transfer_protocol_message(target_network, target_system, target_component, payload) | def function[file_transfer_protocol_encode, parameter[self, target_network, target_system, target_component, payload]]:
constant[
File transfer message
target_network : Network ID (0 for broadcast) (uint8_t)
target_system : System ID (0 for broadcast) (uint8_t)
target_component : Component ID (0 for broadcast) (uint8_t)
payload : Variable length payload. The length is defined by the remaining message length when subtracting the header and other fields. The entire content of this block is opaque unless you understand any the encoding message_type. The particular encoding used can be extension specific and might not always be documented as part of the mavlink specification. (uint8_t)
]
return[call[name[MAVLink_file_transfer_protocol_message], parameter[name[target_network], name[target_system], name[target_component], name[payload]]]] | keyword[def] identifier[file_transfer_protocol_encode] ( identifier[self] , identifier[target_network] , identifier[target_system] , identifier[target_component] , identifier[payload] ):
literal[string]
keyword[return] identifier[MAVLink_file_transfer_protocol_message] ( identifier[target_network] , identifier[target_system] , identifier[target_component] , identifier[payload] ) | def file_transfer_protocol_encode(self, target_network, target_system, target_component, payload):
"""
File transfer message
target_network : Network ID (0 for broadcast) (uint8_t)
target_system : System ID (0 for broadcast) (uint8_t)
target_component : Component ID (0 for broadcast) (uint8_t)
payload : Variable length payload. The length is defined by the remaining message length when subtracting the header and other fields. The entire content of this block is opaque unless you understand any the encoding message_type. The particular encoding used can be extension specific and might not always be documented as part of the mavlink specification. (uint8_t)
"""
return MAVLink_file_transfer_protocol_message(target_network, target_system, target_component, payload) |
def set_source_nodes(self, source_nodes):
r"""
Set the source nodes and compute their t-weights.
Parameters
----------
source_nodes : sequence of integers
Declare the source nodes via their ids.
Notes
-----
It does not get checked if one of the supplied source-nodes already has
a weight assigned (e.g. by passing it to `set_sink_nodes`). This can
occur when the foreground- and background-markers cover the same region. In this
case the order of setting the terminal nodes can affect the graph and therefore
the graph-cut result.
"""
self.__snodes = list(source_nodes)
# set the source-to-node weights (t-weights)
for snode in self.__snodes:
self.__tweights[snode] = (self.MAX, 0) | def function[set_source_nodes, parameter[self, source_nodes]]:
constant[
Set the source nodes and compute their t-weights.
Parameters
----------
source_nodes : sequence of integers
Declare the source nodes via their ids.
Notes
-----
It does not get checked if one of the supplied source-nodes already has
a weight assigned (e.g. by passing it to `set_sink_nodes`). This can
occur when the foreground- and background-markers cover the same region. In this
case the order of setting the terminal nodes can affect the graph and therefore
the graph-cut result.
]
name[self].__snodes assign[=] call[name[list], parameter[name[source_nodes]]]
for taget[name[snode]] in starred[name[self].__snodes] begin[:]
call[name[self].__tweights][name[snode]] assign[=] tuple[[<ast.Attribute object at 0x7da18f09e380>, <ast.Constant object at 0x7da18f09ee60>]] | keyword[def] identifier[set_source_nodes] ( identifier[self] , identifier[source_nodes] ):
literal[string]
identifier[self] . identifier[__snodes] = identifier[list] ( identifier[source_nodes] )
keyword[for] identifier[snode] keyword[in] identifier[self] . identifier[__snodes] :
identifier[self] . identifier[__tweights] [ identifier[snode] ]=( identifier[self] . identifier[MAX] , literal[int] ) | def set_source_nodes(self, source_nodes):
"""
Set the source nodes and compute their t-weights.
Parameters
----------
source_nodes : sequence of integers
Declare the source nodes via their ids.
Notes
-----
It does not get checked if one of the supplied source-nodes already has
a weight assigned (e.g. by passing it to `set_sink_nodes`). This can
occur when the foreground- and background-markers cover the same region. In this
case the order of setting the terminal nodes can affect the graph and therefore
the graph-cut result.
"""
self.__snodes = list(source_nodes)
# set the source-to-node weights (t-weights)
for snode in self.__snodes:
self.__tweights[snode] = (self.MAX, 0) # depends on [control=['for'], data=['snode']] |
def change_password(self, previous_password, proposed_password):
"""
Change the User password
"""
self.check_token()
response = self.client.change_password(
PreviousPassword=previous_password,
ProposedPassword=proposed_password,
AccessToken=self.access_token
)
self._set_attributes(response, {'password': proposed_password}) | def function[change_password, parameter[self, previous_password, proposed_password]]:
constant[
Change the User password
]
call[name[self].check_token, parameter[]]
variable[response] assign[=] call[name[self].client.change_password, parameter[]]
call[name[self]._set_attributes, parameter[name[response], dictionary[[<ast.Constant object at 0x7da1b22e9720>], [<ast.Name object at 0x7da1b22eafe0>]]]] | keyword[def] identifier[change_password] ( identifier[self] , identifier[previous_password] , identifier[proposed_password] ):
literal[string]
identifier[self] . identifier[check_token] ()
identifier[response] = identifier[self] . identifier[client] . identifier[change_password] (
identifier[PreviousPassword] = identifier[previous_password] ,
identifier[ProposedPassword] = identifier[proposed_password] ,
identifier[AccessToken] = identifier[self] . identifier[access_token]
)
identifier[self] . identifier[_set_attributes] ( identifier[response] ,{ literal[string] : identifier[proposed_password] }) | def change_password(self, previous_password, proposed_password):
"""
Change the User password
"""
self.check_token()
response = self.client.change_password(PreviousPassword=previous_password, ProposedPassword=proposed_password, AccessToken=self.access_token)
self._set_attributes(response, {'password': proposed_password}) |
def findNextSiblings(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns the siblings of this Tag that match the given
criteria and appear after this Tag in the document."""
return self._findAll(name, attrs, text, limit,
self.nextSiblingGenerator, **kwargs) | def function[findNextSiblings, parameter[self, name, attrs, text, limit]]:
constant[Returns the siblings of this Tag that match the given
criteria and appear after this Tag in the document.]
return[call[name[self]._findAll, parameter[name[name], name[attrs], name[text], name[limit], name[self].nextSiblingGenerator]]] | keyword[def] identifier[findNextSiblings] ( identifier[self] , identifier[name] = keyword[None] , identifier[attrs] ={}, identifier[text] = keyword[None] , identifier[limit] = keyword[None] ,
** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[_findAll] ( identifier[name] , identifier[attrs] , identifier[text] , identifier[limit] ,
identifier[self] . identifier[nextSiblingGenerator] ,** identifier[kwargs] ) | def findNextSiblings(self, name=None, attrs={}, text=None, limit=None, **kwargs):
"""Returns the siblings of this Tag that match the given
criteria and appear after this Tag in the document."""
return self._findAll(name, attrs, text, limit, self.nextSiblingGenerator, **kwargs) |
def partial_fit(self, counts_df, batch_type='users', step_size=None,
nusers=None, nitems=None, users_in_batch=None, items_in_batch=None,
new_users=False, new_items=False, random_seed=None):
"""
Updates the model with batches of data from a subset of users or items
Note
----
You must pass either the **full set of user-item interactions** that are non-zero for some
subset of users, or the **full set of item-user intersactions** that are non-zero for some
subset of items.
Otherwise, if passing a random sample of triplets, the model will not converge to reasonable results.
Note
----
All user and items IDs must be integers starting at one, without gaps in the numeration.
Note
----
For better results, fit the model with full-batch iterations (using the 'fit' method).
Adding new users and/or items without refitting the model might result in worsened results
for existing users/items. For adding users without altering the parameters for items or for
other users, see the method 'add_user'.
Note
----
Fitting in mini-batches is more prone to numerical instability and compared to full-batch
variational inference, it is more likely that all your parameters will turn to NaNs (which
means the optimization procedure failed).
Parameters
----------
counts_df : data frame (n_samples, 3)
Data frame with the user-item interactions for some subset of users. Must have columns
'UserId', 'ItemId', 'Count'.
batch_type : str, one of 'users' or 'items'
Whether 'counts_df' contains a sample of users with all their item counts ('users'), or a
sample of items with all their user counts ('items').
step_size : None or float in (0, 1)
Step size with which to update the global variables in the model. Must be a number between
zero and one. If passing None, will determine it according to the step size function with which
the model was initialized and the number of iterations or calls to partial fit that have been
performed. If no valid function was passed at the initialization, it will use 1/sqrt(i+1).
nusers : int
Total number of users (not just in this batch!). Only required if calling partial_fit for the
first time on a model object that hasn't been fit.
nitems : int
Total number of items (not just in this batch!). Only required if calling partial_fit for the
first time on a model object that hasn't been fit.
users_in_batch : None or array (n_users_sample,)
Users that are present int counts_df. If passing None, will determine the unique elements in
counts_df.UserId, but passing them if you already have them will skip this step.
items_in_batch : None or array (n_items_sample,)
Items that are present int counts_df. If passing None, will determine the unique elements in
counts_df.ItemId, but passing them if you already have them will skip this step.
new_users : bool
Whether the data contains new users with numeration greater than the number of users with which
the model was initially fit. **For better results refit the model including all users/items instead
of adding them afterwards**.
new_items : bool
Whether the data contains new items with numeration greater than the number of items with which
the model was initially fit. **For better results refit the model including all users/items instead
of adding them afterwards**.
random_seed : int
Random seed to be used for the initialization of new user/item parameters. Ignored when
new_users=False and new_items=False.
Returns
-------
self : obj
Copy of this object.
"""
if self.reindex:
raise ValueError("'partial_fit' can only be called when using reindex=False.")
if not self.keep_all_objs:
raise ValueError("'partial_fit' can only be called when using keep_all_objs=True.")
if self.keep_data:
try:
self.seen
msg = "When using 'partial_fit', the list of items seen by each user is not updated "
msg += "with the data passed here."
warnings.warn(msg)
except:
msg = "When fitting the model through 'partial_fit' without calling 'fit' beforehand, "
msg += "'keep_data' will be forced to False."
warnings.warn(msg)
self.keep_data = False
assert batch_type in ['users', 'items']
if batch_type == 'users':
user_batch = True
else:
user_batch = False
if nusers is None:
try:
nusers = self.nusers
except:
raise ValueError("Must specify total number of users when calling 'partial_fit' for the first time.")
if nitems is None:
try:
nitems = self.nitems
except:
raise ValueError("Must specify total number of items when calling 'partial_fit' for the first time.")
try:
if self.nusers is None:
self.nusers = nusers
except:
self.nusers = nusers
try:
if self.nitems is None:
self.nitems = nitems
except:
self.nitems = nitems
if step_size is None:
try:
self.step_size(0)
try:
step_size = self.step_size(self.niter)
except:
self.niter = 0
step_size = 1.0
except:
try:
step_size = 1 / np.sqrt(self.niter + 2)
except:
self.niter = 0
step_size = 1.0
assert step_size >= 0
assert step_size <= 1
if random_seed is not None:
if isinstance(random_seed, float):
random_seed = int(random_seed)
assert isinstance(random_seed, int)
if counts_df.__class__.__name__ == "ndarray":
counts_df = pd.DataFrame(counts_df)
counts_df.columns[:3] = ['UserId', 'ItemId', 'Count']
assert counts_df.__class__.__name__ == "DataFrame"
assert 'UserId' in counts_df.columns.values
assert 'ItemId' in counts_df.columns.values
assert 'Count' in counts_df.columns.values
assert counts_df.shape[0] > 0
Y_batch = counts_df.Count.values.astype('float32')
ix_u_batch = counts_df.UserId.values.astype(cython_loops.obj_ind_type)
ix_i_batch = counts_df.ItemId.values.astype(cython_loops.obj_ind_type)
if users_in_batch is None:
users_in_batch = np.unique(ix_u_batch)
else:
users_in_batch = np.array(users_in_batch).astype(cython_loops.obj_ind_type)
if items_in_batch is None:
items_in_batch = np.unique(ix_i_batch)
else:
items_in_batch = np.array(items_in_batch).astype(cython_loops.obj_ind_type)
if (self.Theta is None) or (self.Beta is None):
self._cast_before_fit()
self.Gamma_shp, self.Gamma_rte, self.Lambda_shp, self.Lambda_rte, \
self.k_rte, self.t_rte = cython_loops.initialize_parameters(
self.Theta, self.Beta, self.random_seed, self.a, self.a_prime,
self.b_prime, self.c, self.c_prime, self.d_prime)
self.Theta = self.Gamma_shp / self.Gamma_rte
self.Beta = self.Lambda_shp / self.Lambda_rte
if new_users:
if not self.keep_all_objs:
raise ValueError("Can only add users without refitting when using keep_all_objs=True")
nusers_now = ix_u_batch.max() + 1
nusers_add = self.nusers - nusers_now
if nusers_add < 1:
raise ValueError("There are no new users in the data passed to 'partial_fit'.")
self._initialize_extra_users(nusers_add, random_seed)
self.nusers += nusers_add
if new_items:
if not self.keep_all_objs:
raise ValueError("Can only add items without refitting when using keep_all_objs=True")
nitems_now = ix_i_batch.max() + 1
nitems_add = self.nitems - nitems_now
if nitems_add < 1:
raise ValueError("There are no new items in the data passed to 'partial_fit'.")
self._initialize_extra_items(nitems_add, random_seed)
self.nitems += nitems_add
k_shp = cython_loops.cast_float(self.a_prime + self.k * self.a)
t_shp = cython_loops.cast_float(self.c_prime + self.k * self.c)
add_k_rte = cython_loops.cast_float(self.a_prime / self.b_prime)
add_t_rte = cython_loops.cast_float(self.c_prime / self.d_prime)
multiplier_batch = float(nusers) / users_in_batch.shape[0]
cython_loops.partial_fit(
Y_batch,
ix_u_batch, ix_i_batch,
self.Theta, self.Beta,
self.Gamma_shp, self.Gamma_rte,
self.Lambda_shp, self.Lambda_rte,
self.k_rte, self.t_rte,
add_k_rte, add_t_rte, self.a, self.c,
k_shp, t_shp, cython_loops.cast_ind_type(self.k),
users_in_batch, items_in_batch,
cython_loops.cast_int(self.allow_inconsistent_math),
cython_loops.cast_float(step_size), cython_loops.cast_float(multiplier_batch),
self.ncores, user_batch
)
self.niter += 1
self.is_fitted = True
return self | def function[partial_fit, parameter[self, counts_df, batch_type, step_size, nusers, nitems, users_in_batch, items_in_batch, new_users, new_items, random_seed]]:
constant[
Updates the model with batches of data from a subset of users or items
Note
----
You must pass either the **full set of user-item interactions** that are non-zero for some
subset of users, or the **full set of item-user intersactions** that are non-zero for some
subset of items.
Otherwise, if passing a random sample of triplets, the model will not converge to reasonable results.
Note
----
All user and items IDs must be integers starting at one, without gaps in the numeration.
Note
----
For better results, fit the model with full-batch iterations (using the 'fit' method).
Adding new users and/or items without refitting the model might result in worsened results
for existing users/items. For adding users without altering the parameters for items or for
other users, see the method 'add_user'.
Note
----
Fitting in mini-batches is more prone to numerical instability and compared to full-batch
variational inference, it is more likely that all your parameters will turn to NaNs (which
means the optimization procedure failed).
Parameters
----------
counts_df : data frame (n_samples, 3)
Data frame with the user-item interactions for some subset of users. Must have columns
'UserId', 'ItemId', 'Count'.
batch_type : str, one of 'users' or 'items'
Whether 'counts_df' contains a sample of users with all their item counts ('users'), or a
sample of items with all their user counts ('items').
step_size : None or float in (0, 1)
Step size with which to update the global variables in the model. Must be a number between
zero and one. If passing None, will determine it according to the step size function with which
the model was initialized and the number of iterations or calls to partial fit that have been
performed. If no valid function was passed at the initialization, it will use 1/sqrt(i+1).
nusers : int
Total number of users (not just in this batch!). Only required if calling partial_fit for the
first time on a model object that hasn't been fit.
nitems : int
Total number of items (not just in this batch!). Only required if calling partial_fit for the
first time on a model object that hasn't been fit.
users_in_batch : None or array (n_users_sample,)
Users that are present int counts_df. If passing None, will determine the unique elements in
counts_df.UserId, but passing them if you already have them will skip this step.
items_in_batch : None or array (n_items_sample,)
Items that are present int counts_df. If passing None, will determine the unique elements in
counts_df.ItemId, but passing them if you already have them will skip this step.
new_users : bool
Whether the data contains new users with numeration greater than the number of users with which
the model was initially fit. **For better results refit the model including all users/items instead
of adding them afterwards**.
new_items : bool
Whether the data contains new items with numeration greater than the number of items with which
the model was initially fit. **For better results refit the model including all users/items instead
of adding them afterwards**.
random_seed : int
Random seed to be used for the initialization of new user/item parameters. Ignored when
new_users=False and new_items=False.
Returns
-------
self : obj
Copy of this object.
]
if name[self].reindex begin[:]
<ast.Raise object at 0x7da1b0ef7a60>
if <ast.UnaryOp object at 0x7da1b0ef7970> begin[:]
<ast.Raise object at 0x7da1b0ef78e0>
if name[self].keep_data begin[:]
<ast.Try object at 0x7da1b0ef7790>
assert[compare[name[batch_type] in list[[<ast.Constant object at 0x7da1b0ef7070>, <ast.Constant object at 0x7da1b0ef7040>]]]]
if compare[name[batch_type] equal[==] constant[users]] begin[:]
variable[user_batch] assign[=] constant[True]
if compare[name[nusers] is constant[None]] begin[:]
<ast.Try object at 0x7da1b0ef6d70>
if compare[name[nitems] is constant[None]] begin[:]
<ast.Try object at 0x7da1b0ef6ad0>
<ast.Try object at 0x7da1b0ef4070>
<ast.Try object at 0x7da1b0ed02e0>
if compare[name[step_size] is constant[None]] begin[:]
<ast.Try object at 0x7da1b0ed3c40>
assert[compare[name[step_size] greater_or_equal[>=] constant[0]]]
assert[compare[name[step_size] less_or_equal[<=] constant[1]]]
if compare[name[random_seed] is_not constant[None]] begin[:]
if call[name[isinstance], parameter[name[random_seed], name[float]]] begin[:]
variable[random_seed] assign[=] call[name[int], parameter[name[random_seed]]]
assert[call[name[isinstance], parameter[name[random_seed], name[int]]]]
if compare[name[counts_df].__class__.__name__ equal[==] constant[ndarray]] begin[:]
variable[counts_df] assign[=] call[name[pd].DataFrame, parameter[name[counts_df]]]
call[name[counts_df].columns][<ast.Slice object at 0x7da1b0b2f760>] assign[=] list[[<ast.Constant object at 0x7da1b0b2f7f0>, <ast.Constant object at 0x7da1b0b2f2b0>, <ast.Constant object at 0x7da1b0b2ef80>]]
assert[compare[name[counts_df].__class__.__name__ equal[==] constant[DataFrame]]]
assert[compare[constant[UserId] in name[counts_df].columns.values]]
assert[compare[constant[ItemId] in name[counts_df].columns.values]]
assert[compare[constant[Count] in name[counts_df].columns.values]]
assert[compare[call[name[counts_df].shape][constant[0]] greater[>] constant[0]]]
variable[Y_batch] assign[=] call[name[counts_df].Count.values.astype, parameter[constant[float32]]]
variable[ix_u_batch] assign[=] call[name[counts_df].UserId.values.astype, parameter[name[cython_loops].obj_ind_type]]
variable[ix_i_batch] assign[=] call[name[counts_df].ItemId.values.astype, parameter[name[cython_loops].obj_ind_type]]
if compare[name[users_in_batch] is constant[None]] begin[:]
variable[users_in_batch] assign[=] call[name[np].unique, parameter[name[ix_u_batch]]]
if compare[name[items_in_batch] is constant[None]] begin[:]
variable[items_in_batch] assign[=] call[name[np].unique, parameter[name[ix_i_batch]]]
if <ast.BoolOp object at 0x7da1b0ed2980> begin[:]
call[name[self]._cast_before_fit, parameter[]]
<ast.Tuple object at 0x7da1b0ed1870> assign[=] call[name[cython_loops].initialize_parameters, parameter[name[self].Theta, name[self].Beta, name[self].random_seed, name[self].a, name[self].a_prime, name[self].b_prime, name[self].c, name[self].c_prime, name[self].d_prime]]
name[self].Theta assign[=] binary_operation[name[self].Gamma_shp / name[self].Gamma_rte]
name[self].Beta assign[=] binary_operation[name[self].Lambda_shp / name[self].Lambda_rte]
if name[new_users] begin[:]
if <ast.UnaryOp object at 0x7da1b0ed15d0> begin[:]
<ast.Raise object at 0x7da1b0ed26b0>
variable[nusers_now] assign[=] binary_operation[call[name[ix_u_batch].max, parameter[]] + constant[1]]
variable[nusers_add] assign[=] binary_operation[name[self].nusers - name[nusers_now]]
if compare[name[nusers_add] less[<] constant[1]] begin[:]
<ast.Raise object at 0x7da1b0e27c10>
call[name[self]._initialize_extra_users, parameter[name[nusers_add], name[random_seed]]]
<ast.AugAssign object at 0x7da1b0e25120>
if name[new_items] begin[:]
if <ast.UnaryOp object at 0x7da1b0e265f0> begin[:]
<ast.Raise object at 0x7da1b0e27e50>
variable[nitems_now] assign[=] binary_operation[call[name[ix_i_batch].max, parameter[]] + constant[1]]
variable[nitems_add] assign[=] binary_operation[name[self].nitems - name[nitems_now]]
if compare[name[nitems_add] less[<] constant[1]] begin[:]
<ast.Raise object at 0x7da1b0e27d60>
call[name[self]._initialize_extra_items, parameter[name[nitems_add], name[random_seed]]]
<ast.AugAssign object at 0x7da1b0e257e0>
variable[k_shp] assign[=] call[name[cython_loops].cast_float, parameter[binary_operation[name[self].a_prime + binary_operation[name[self].k * name[self].a]]]]
variable[t_shp] assign[=] call[name[cython_loops].cast_float, parameter[binary_operation[name[self].c_prime + binary_operation[name[self].k * name[self].c]]]]
variable[add_k_rte] assign[=] call[name[cython_loops].cast_float, parameter[binary_operation[name[self].a_prime / name[self].b_prime]]]
variable[add_t_rte] assign[=] call[name[cython_loops].cast_float, parameter[binary_operation[name[self].c_prime / name[self].d_prime]]]
variable[multiplier_batch] assign[=] binary_operation[call[name[float], parameter[name[nusers]]] / call[name[users_in_batch].shape][constant[0]]]
call[name[cython_loops].partial_fit, parameter[name[Y_batch], name[ix_u_batch], name[ix_i_batch], name[self].Theta, name[self].Beta, name[self].Gamma_shp, name[self].Gamma_rte, name[self].Lambda_shp, name[self].Lambda_rte, name[self].k_rte, name[self].t_rte, name[add_k_rte], name[add_t_rte], name[self].a, name[self].c, name[k_shp], name[t_shp], call[name[cython_loops].cast_ind_type, parameter[name[self].k]], name[users_in_batch], name[items_in_batch], call[name[cython_loops].cast_int, parameter[name[self].allow_inconsistent_math]], call[name[cython_loops].cast_float, parameter[name[step_size]]], call[name[cython_loops].cast_float, parameter[name[multiplier_batch]]], name[self].ncores, name[user_batch]]]
<ast.AugAssign object at 0x7da1b0e277f0>
name[self].is_fitted assign[=] constant[True]
return[name[self]] | keyword[def] identifier[partial_fit] ( identifier[self] , identifier[counts_df] , identifier[batch_type] = literal[string] , identifier[step_size] = keyword[None] ,
identifier[nusers] = keyword[None] , identifier[nitems] = keyword[None] , identifier[users_in_batch] = keyword[None] , identifier[items_in_batch] = keyword[None] ,
identifier[new_users] = keyword[False] , identifier[new_items] = keyword[False] , identifier[random_seed] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[reindex] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[keep_all_objs] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[self] . identifier[keep_data] :
keyword[try] :
identifier[self] . identifier[seen]
identifier[msg] = literal[string]
identifier[msg] += literal[string]
identifier[warnings] . identifier[warn] ( identifier[msg] )
keyword[except] :
identifier[msg] = literal[string]
identifier[msg] += literal[string]
identifier[warnings] . identifier[warn] ( identifier[msg] )
identifier[self] . identifier[keep_data] = keyword[False]
keyword[assert] identifier[batch_type] keyword[in] [ literal[string] , literal[string] ]
keyword[if] identifier[batch_type] == literal[string] :
identifier[user_batch] = keyword[True]
keyword[else] :
identifier[user_batch] = keyword[False]
keyword[if] identifier[nusers] keyword[is] keyword[None] :
keyword[try] :
identifier[nusers] = identifier[self] . identifier[nusers]
keyword[except] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[nitems] keyword[is] keyword[None] :
keyword[try] :
identifier[nitems] = identifier[self] . identifier[nitems]
keyword[except] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[try] :
keyword[if] identifier[self] . identifier[nusers] keyword[is] keyword[None] :
identifier[self] . identifier[nusers] = identifier[nusers]
keyword[except] :
identifier[self] . identifier[nusers] = identifier[nusers]
keyword[try] :
keyword[if] identifier[self] . identifier[nitems] keyword[is] keyword[None] :
identifier[self] . identifier[nitems] = identifier[nitems]
keyword[except] :
identifier[self] . identifier[nitems] = identifier[nitems]
keyword[if] identifier[step_size] keyword[is] keyword[None] :
keyword[try] :
identifier[self] . identifier[step_size] ( literal[int] )
keyword[try] :
identifier[step_size] = identifier[self] . identifier[step_size] ( identifier[self] . identifier[niter] )
keyword[except] :
identifier[self] . identifier[niter] = literal[int]
identifier[step_size] = literal[int]
keyword[except] :
keyword[try] :
identifier[step_size] = literal[int] / identifier[np] . identifier[sqrt] ( identifier[self] . identifier[niter] + literal[int] )
keyword[except] :
identifier[self] . identifier[niter] = literal[int]
identifier[step_size] = literal[int]
keyword[assert] identifier[step_size] >= literal[int]
keyword[assert] identifier[step_size] <= literal[int]
keyword[if] identifier[random_seed] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[random_seed] , identifier[float] ):
identifier[random_seed] = identifier[int] ( identifier[random_seed] )
keyword[assert] identifier[isinstance] ( identifier[random_seed] , identifier[int] )
keyword[if] identifier[counts_df] . identifier[__class__] . identifier[__name__] == literal[string] :
identifier[counts_df] = identifier[pd] . identifier[DataFrame] ( identifier[counts_df] )
identifier[counts_df] . identifier[columns] [: literal[int] ]=[ literal[string] , literal[string] , literal[string] ]
keyword[assert] identifier[counts_df] . identifier[__class__] . identifier[__name__] == literal[string]
keyword[assert] literal[string] keyword[in] identifier[counts_df] . identifier[columns] . identifier[values]
keyword[assert] literal[string] keyword[in] identifier[counts_df] . identifier[columns] . identifier[values]
keyword[assert] literal[string] keyword[in] identifier[counts_df] . identifier[columns] . identifier[values]
keyword[assert] identifier[counts_df] . identifier[shape] [ literal[int] ]> literal[int]
identifier[Y_batch] = identifier[counts_df] . identifier[Count] . identifier[values] . identifier[astype] ( literal[string] )
identifier[ix_u_batch] = identifier[counts_df] . identifier[UserId] . identifier[values] . identifier[astype] ( identifier[cython_loops] . identifier[obj_ind_type] )
identifier[ix_i_batch] = identifier[counts_df] . identifier[ItemId] . identifier[values] . identifier[astype] ( identifier[cython_loops] . identifier[obj_ind_type] )
keyword[if] identifier[users_in_batch] keyword[is] keyword[None] :
identifier[users_in_batch] = identifier[np] . identifier[unique] ( identifier[ix_u_batch] )
keyword[else] :
identifier[users_in_batch] = identifier[np] . identifier[array] ( identifier[users_in_batch] ). identifier[astype] ( identifier[cython_loops] . identifier[obj_ind_type] )
keyword[if] identifier[items_in_batch] keyword[is] keyword[None] :
identifier[items_in_batch] = identifier[np] . identifier[unique] ( identifier[ix_i_batch] )
keyword[else] :
identifier[items_in_batch] = identifier[np] . identifier[array] ( identifier[items_in_batch] ). identifier[astype] ( identifier[cython_loops] . identifier[obj_ind_type] )
keyword[if] ( identifier[self] . identifier[Theta] keyword[is] keyword[None] ) keyword[or] ( identifier[self] . identifier[Beta] keyword[is] keyword[None] ):
identifier[self] . identifier[_cast_before_fit] ()
identifier[self] . identifier[Gamma_shp] , identifier[self] . identifier[Gamma_rte] , identifier[self] . identifier[Lambda_shp] , identifier[self] . identifier[Lambda_rte] , identifier[self] . identifier[k_rte] , identifier[self] . identifier[t_rte] = identifier[cython_loops] . identifier[initialize_parameters] (
identifier[self] . identifier[Theta] , identifier[self] . identifier[Beta] , identifier[self] . identifier[random_seed] , identifier[self] . identifier[a] , identifier[self] . identifier[a_prime] ,
identifier[self] . identifier[b_prime] , identifier[self] . identifier[c] , identifier[self] . identifier[c_prime] , identifier[self] . identifier[d_prime] )
identifier[self] . identifier[Theta] = identifier[self] . identifier[Gamma_shp] / identifier[self] . identifier[Gamma_rte]
identifier[self] . identifier[Beta] = identifier[self] . identifier[Lambda_shp] / identifier[self] . identifier[Lambda_rte]
keyword[if] identifier[new_users] :
keyword[if] keyword[not] identifier[self] . identifier[keep_all_objs] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[nusers_now] = identifier[ix_u_batch] . identifier[max] ()+ literal[int]
identifier[nusers_add] = identifier[self] . identifier[nusers] - identifier[nusers_now]
keyword[if] identifier[nusers_add] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[_initialize_extra_users] ( identifier[nusers_add] , identifier[random_seed] )
identifier[self] . identifier[nusers] += identifier[nusers_add]
keyword[if] identifier[new_items] :
keyword[if] keyword[not] identifier[self] . identifier[keep_all_objs] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[nitems_now] = identifier[ix_i_batch] . identifier[max] ()+ literal[int]
identifier[nitems_add] = identifier[self] . identifier[nitems] - identifier[nitems_now]
keyword[if] identifier[nitems_add] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[_initialize_extra_items] ( identifier[nitems_add] , identifier[random_seed] )
identifier[self] . identifier[nitems] += identifier[nitems_add]
identifier[k_shp] = identifier[cython_loops] . identifier[cast_float] ( identifier[self] . identifier[a_prime] + identifier[self] . identifier[k] * identifier[self] . identifier[a] )
identifier[t_shp] = identifier[cython_loops] . identifier[cast_float] ( identifier[self] . identifier[c_prime] + identifier[self] . identifier[k] * identifier[self] . identifier[c] )
identifier[add_k_rte] = identifier[cython_loops] . identifier[cast_float] ( identifier[self] . identifier[a_prime] / identifier[self] . identifier[b_prime] )
identifier[add_t_rte] = identifier[cython_loops] . identifier[cast_float] ( identifier[self] . identifier[c_prime] / identifier[self] . identifier[d_prime] )
identifier[multiplier_batch] = identifier[float] ( identifier[nusers] )/ identifier[users_in_batch] . identifier[shape] [ literal[int] ]
identifier[cython_loops] . identifier[partial_fit] (
identifier[Y_batch] ,
identifier[ix_u_batch] , identifier[ix_i_batch] ,
identifier[self] . identifier[Theta] , identifier[self] . identifier[Beta] ,
identifier[self] . identifier[Gamma_shp] , identifier[self] . identifier[Gamma_rte] ,
identifier[self] . identifier[Lambda_shp] , identifier[self] . identifier[Lambda_rte] ,
identifier[self] . identifier[k_rte] , identifier[self] . identifier[t_rte] ,
identifier[add_k_rte] , identifier[add_t_rte] , identifier[self] . identifier[a] , identifier[self] . identifier[c] ,
identifier[k_shp] , identifier[t_shp] , identifier[cython_loops] . identifier[cast_ind_type] ( identifier[self] . identifier[k] ),
identifier[users_in_batch] , identifier[items_in_batch] ,
identifier[cython_loops] . identifier[cast_int] ( identifier[self] . identifier[allow_inconsistent_math] ),
identifier[cython_loops] . identifier[cast_float] ( identifier[step_size] ), identifier[cython_loops] . identifier[cast_float] ( identifier[multiplier_batch] ),
identifier[self] . identifier[ncores] , identifier[user_batch]
)
identifier[self] . identifier[niter] += literal[int]
identifier[self] . identifier[is_fitted] = keyword[True]
keyword[return] identifier[self] | def partial_fit(self, counts_df, batch_type='users', step_size=None, nusers=None, nitems=None, users_in_batch=None, items_in_batch=None, new_users=False, new_items=False, random_seed=None):
"""
Updates the model with batches of data from a subset of users or items
Note
----
You must pass either the **full set of user-item interactions** that are non-zero for some
subset of users, or the **full set of item-user intersactions** that are non-zero for some
subset of items.
Otherwise, if passing a random sample of triplets, the model will not converge to reasonable results.
Note
----
All user and items IDs must be integers starting at one, without gaps in the numeration.
Note
----
For better results, fit the model with full-batch iterations (using the 'fit' method).
Adding new users and/or items without refitting the model might result in worsened results
for existing users/items. For adding users without altering the parameters for items or for
other users, see the method 'add_user'.
Note
----
Fitting in mini-batches is more prone to numerical instability and compared to full-batch
variational inference, it is more likely that all your parameters will turn to NaNs (which
means the optimization procedure failed).
Parameters
----------
counts_df : data frame (n_samples, 3)
Data frame with the user-item interactions for some subset of users. Must have columns
'UserId', 'ItemId', 'Count'.
batch_type : str, one of 'users' or 'items'
Whether 'counts_df' contains a sample of users with all their item counts ('users'), or a
sample of items with all their user counts ('items').
step_size : None or float in (0, 1)
Step size with which to update the global variables in the model. Must be a number between
zero and one. If passing None, will determine it according to the step size function with which
the model was initialized and the number of iterations or calls to partial fit that have been
performed. If no valid function was passed at the initialization, it will use 1/sqrt(i+1).
nusers : int
Total number of users (not just in this batch!). Only required if calling partial_fit for the
first time on a model object that hasn't been fit.
nitems : int
Total number of items (not just in this batch!). Only required if calling partial_fit for the
first time on a model object that hasn't been fit.
users_in_batch : None or array (n_users_sample,)
Users that are present int counts_df. If passing None, will determine the unique elements in
counts_df.UserId, but passing them if you already have them will skip this step.
items_in_batch : None or array (n_items_sample,)
Items that are present int counts_df. If passing None, will determine the unique elements in
counts_df.ItemId, but passing them if you already have them will skip this step.
new_users : bool
Whether the data contains new users with numeration greater than the number of users with which
the model was initially fit. **For better results refit the model including all users/items instead
of adding them afterwards**.
new_items : bool
Whether the data contains new items with numeration greater than the number of items with which
the model was initially fit. **For better results refit the model including all users/items instead
of adding them afterwards**.
random_seed : int
Random seed to be used for the initialization of new user/item parameters. Ignored when
new_users=False and new_items=False.
Returns
-------
self : obj
Copy of this object.
"""
if self.reindex:
raise ValueError("'partial_fit' can only be called when using reindex=False.") # depends on [control=['if'], data=[]]
if not self.keep_all_objs:
raise ValueError("'partial_fit' can only be called when using keep_all_objs=True.") # depends on [control=['if'], data=[]]
if self.keep_data:
try:
self.seen
msg = "When using 'partial_fit', the list of items seen by each user is not updated "
msg += 'with the data passed here.'
warnings.warn(msg) # depends on [control=['try'], data=[]]
except:
msg = "When fitting the model through 'partial_fit' without calling 'fit' beforehand, "
msg += "'keep_data' will be forced to False."
warnings.warn(msg)
self.keep_data = False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
assert batch_type in ['users', 'items']
if batch_type == 'users':
user_batch = True # depends on [control=['if'], data=[]]
else:
user_batch = False
if nusers is None:
try:
nusers = self.nusers # depends on [control=['try'], data=[]]
except:
raise ValueError("Must specify total number of users when calling 'partial_fit' for the first time.") # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['nusers']]
if nitems is None:
try:
nitems = self.nitems # depends on [control=['try'], data=[]]
except:
raise ValueError("Must specify total number of items when calling 'partial_fit' for the first time.") # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['nitems']]
try:
if self.nusers is None:
self.nusers = nusers # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except:
self.nusers = nusers # depends on [control=['except'], data=[]]
try:
if self.nitems is None:
self.nitems = nitems # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except:
self.nitems = nitems # depends on [control=['except'], data=[]]
if step_size is None:
try:
self.step_size(0)
try:
step_size = self.step_size(self.niter) # depends on [control=['try'], data=[]]
except:
self.niter = 0
step_size = 1.0 # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]]
except:
try:
step_size = 1 / np.sqrt(self.niter + 2) # depends on [control=['try'], data=[]]
except:
self.niter = 0
step_size = 1.0 # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['step_size']]
assert step_size >= 0
assert step_size <= 1
if random_seed is not None:
if isinstance(random_seed, float):
random_seed = int(random_seed) # depends on [control=['if'], data=[]]
assert isinstance(random_seed, int) # depends on [control=['if'], data=['random_seed']]
if counts_df.__class__.__name__ == 'ndarray':
counts_df = pd.DataFrame(counts_df)
counts_df.columns[:3] = ['UserId', 'ItemId', 'Count'] # depends on [control=['if'], data=[]]
assert counts_df.__class__.__name__ == 'DataFrame'
assert 'UserId' in counts_df.columns.values
assert 'ItemId' in counts_df.columns.values
assert 'Count' in counts_df.columns.values
assert counts_df.shape[0] > 0
Y_batch = counts_df.Count.values.astype('float32')
ix_u_batch = counts_df.UserId.values.astype(cython_loops.obj_ind_type)
ix_i_batch = counts_df.ItemId.values.astype(cython_loops.obj_ind_type)
if users_in_batch is None:
users_in_batch = np.unique(ix_u_batch) # depends on [control=['if'], data=['users_in_batch']]
else:
users_in_batch = np.array(users_in_batch).astype(cython_loops.obj_ind_type)
if items_in_batch is None:
items_in_batch = np.unique(ix_i_batch) # depends on [control=['if'], data=['items_in_batch']]
else:
items_in_batch = np.array(items_in_batch).astype(cython_loops.obj_ind_type)
if self.Theta is None or self.Beta is None:
self._cast_before_fit()
(self.Gamma_shp, self.Gamma_rte, self.Lambda_shp, self.Lambda_rte, self.k_rte, self.t_rte) = cython_loops.initialize_parameters(self.Theta, self.Beta, self.random_seed, self.a, self.a_prime, self.b_prime, self.c, self.c_prime, self.d_prime)
self.Theta = self.Gamma_shp / self.Gamma_rte
self.Beta = self.Lambda_shp / self.Lambda_rte # depends on [control=['if'], data=[]]
if new_users:
if not self.keep_all_objs:
raise ValueError('Can only add users without refitting when using keep_all_objs=True') # depends on [control=['if'], data=[]]
nusers_now = ix_u_batch.max() + 1
nusers_add = self.nusers - nusers_now
if nusers_add < 1:
raise ValueError("There are no new users in the data passed to 'partial_fit'.") # depends on [control=['if'], data=[]]
self._initialize_extra_users(nusers_add, random_seed)
self.nusers += nusers_add # depends on [control=['if'], data=[]]
if new_items:
if not self.keep_all_objs:
raise ValueError('Can only add items without refitting when using keep_all_objs=True') # depends on [control=['if'], data=[]]
nitems_now = ix_i_batch.max() + 1
nitems_add = self.nitems - nitems_now
if nitems_add < 1:
raise ValueError("There are no new items in the data passed to 'partial_fit'.") # depends on [control=['if'], data=[]]
self._initialize_extra_items(nitems_add, random_seed)
self.nitems += nitems_add # depends on [control=['if'], data=[]]
k_shp = cython_loops.cast_float(self.a_prime + self.k * self.a)
t_shp = cython_loops.cast_float(self.c_prime + self.k * self.c)
add_k_rte = cython_loops.cast_float(self.a_prime / self.b_prime)
add_t_rte = cython_loops.cast_float(self.c_prime / self.d_prime)
multiplier_batch = float(nusers) / users_in_batch.shape[0]
cython_loops.partial_fit(Y_batch, ix_u_batch, ix_i_batch, self.Theta, self.Beta, self.Gamma_shp, self.Gamma_rte, self.Lambda_shp, self.Lambda_rte, self.k_rte, self.t_rte, add_k_rte, add_t_rte, self.a, self.c, k_shp, t_shp, cython_loops.cast_ind_type(self.k), users_in_batch, items_in_batch, cython_loops.cast_int(self.allow_inconsistent_math), cython_loops.cast_float(step_size), cython_loops.cast_float(multiplier_batch), self.ncores, user_batch)
self.niter += 1
self.is_fitted = True
return self |
def create_env(self, interpreter, is_current, options):
"""Create the virtualenv and return its info."""
if is_current:
# apply pyvenv options
pyvenv_options = options['pyvenv_options']
if "--system-site-packages" in pyvenv_options:
self.system_site_packages = True
logger.debug("Creating virtualenv with pyvenv. options=%s", pyvenv_options)
self.create(self.env_path)
else:
virtualenv_options = options['virtualenv_options']
logger.debug("Creating virtualenv with virtualenv")
self.create_with_virtualenv(interpreter, virtualenv_options)
logger.debug("env_bin_path: %s", self.env_bin_path)
# Re check if pip was installed (supporting both binary and .exe for Windows)
pip_bin = os.path.join(self.env_bin_path, "pip")
pip_exe = os.path.join(self.env_bin_path, "pip.exe")
if not (os.path.exists(pip_bin) or os.path.exists(pip_exe)):
logger.debug("pip isn't installed in the venv, setting pip_installed=False")
self.pip_installed = False
return self.env_path, self.env_bin_path, self.pip_installed | def function[create_env, parameter[self, interpreter, is_current, options]]:
constant[Create the virtualenv and return its info.]
if name[is_current] begin[:]
variable[pyvenv_options] assign[=] call[name[options]][constant[pyvenv_options]]
if compare[constant[--system-site-packages] in name[pyvenv_options]] begin[:]
name[self].system_site_packages assign[=] constant[True]
call[name[logger].debug, parameter[constant[Creating virtualenv with pyvenv. options=%s], name[pyvenv_options]]]
call[name[self].create, parameter[name[self].env_path]]
call[name[logger].debug, parameter[constant[env_bin_path: %s], name[self].env_bin_path]]
variable[pip_bin] assign[=] call[name[os].path.join, parameter[name[self].env_bin_path, constant[pip]]]
variable[pip_exe] assign[=] call[name[os].path.join, parameter[name[self].env_bin_path, constant[pip.exe]]]
if <ast.UnaryOp object at 0x7da1b0efe890> begin[:]
call[name[logger].debug, parameter[constant[pip isn't installed in the venv, setting pip_installed=False]]]
name[self].pip_installed assign[=] constant[False]
return[tuple[[<ast.Attribute object at 0x7da1b0efead0>, <ast.Attribute object at 0x7da1b0efe650>, <ast.Attribute object at 0x7da1b0efcf10>]]] | keyword[def] identifier[create_env] ( identifier[self] , identifier[interpreter] , identifier[is_current] , identifier[options] ):
literal[string]
keyword[if] identifier[is_current] :
identifier[pyvenv_options] = identifier[options] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[pyvenv_options] :
identifier[self] . identifier[system_site_packages] = keyword[True]
identifier[logger] . identifier[debug] ( literal[string] , identifier[pyvenv_options] )
identifier[self] . identifier[create] ( identifier[self] . identifier[env_path] )
keyword[else] :
identifier[virtualenv_options] = identifier[options] [ literal[string] ]
identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[create_with_virtualenv] ( identifier[interpreter] , identifier[virtualenv_options] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[env_bin_path] )
identifier[pip_bin] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[env_bin_path] , literal[string] )
identifier[pip_exe] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[env_bin_path] , literal[string] )
keyword[if] keyword[not] ( identifier[os] . identifier[path] . identifier[exists] ( identifier[pip_bin] ) keyword[or] identifier[os] . identifier[path] . identifier[exists] ( identifier[pip_exe] )):
identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[pip_installed] = keyword[False]
keyword[return] identifier[self] . identifier[env_path] , identifier[self] . identifier[env_bin_path] , identifier[self] . identifier[pip_installed] | def create_env(self, interpreter, is_current, options):
"""Create the virtualenv and return its info."""
if is_current:
# apply pyvenv options
pyvenv_options = options['pyvenv_options']
if '--system-site-packages' in pyvenv_options:
self.system_site_packages = True # depends on [control=['if'], data=[]]
logger.debug('Creating virtualenv with pyvenv. options=%s', pyvenv_options)
self.create(self.env_path) # depends on [control=['if'], data=[]]
else:
virtualenv_options = options['virtualenv_options']
logger.debug('Creating virtualenv with virtualenv')
self.create_with_virtualenv(interpreter, virtualenv_options)
logger.debug('env_bin_path: %s', self.env_bin_path)
# Re check if pip was installed (supporting both binary and .exe for Windows)
pip_bin = os.path.join(self.env_bin_path, 'pip')
pip_exe = os.path.join(self.env_bin_path, 'pip.exe')
if not (os.path.exists(pip_bin) or os.path.exists(pip_exe)):
logger.debug("pip isn't installed in the venv, setting pip_installed=False")
self.pip_installed = False # depends on [control=['if'], data=[]]
return (self.env_path, self.env_bin_path, self.pip_installed) |
def _check_valid_data(self, data):
"""Checks that the given data is a float array with one channel.
Parameters
----------
data : :obj:`numpy.ndarray`
The data to check.
Raises
------
ValueError
If the data is invalid.
"""
if data.dtype.type is not np.float32 and \
data.dtype.type is not np.float64:
raise ValueError(
'Illegal data type. Depth images only support float arrays')
if len(data.shape) == 3 and data.shape[2] != 1:
raise ValueError(
'Illegal data type. Depth images only support single channel') | def function[_check_valid_data, parameter[self, data]]:
constant[Checks that the given data is a float array with one channel.
Parameters
----------
data : :obj:`numpy.ndarray`
The data to check.
Raises
------
ValueError
If the data is invalid.
]
if <ast.BoolOp object at 0x7da1b04d5780> begin[:]
<ast.Raise object at 0x7da1b04d6ad0>
if <ast.BoolOp object at 0x7da1b04d6da0> begin[:]
<ast.Raise object at 0x7da1b04d53c0> | keyword[def] identifier[_check_valid_data] ( identifier[self] , identifier[data] ):
literal[string]
keyword[if] identifier[data] . identifier[dtype] . identifier[type] keyword[is] keyword[not] identifier[np] . identifier[float32] keyword[and] identifier[data] . identifier[dtype] . identifier[type] keyword[is] keyword[not] identifier[np] . identifier[float64] :
keyword[raise] identifier[ValueError] (
literal[string] )
keyword[if] identifier[len] ( identifier[data] . identifier[shape] )== literal[int] keyword[and] identifier[data] . identifier[shape] [ literal[int] ]!= literal[int] :
keyword[raise] identifier[ValueError] (
literal[string] ) | def _check_valid_data(self, data):
"""Checks that the given data is a float array with one channel.
Parameters
----------
data : :obj:`numpy.ndarray`
The data to check.
Raises
------
ValueError
If the data is invalid.
"""
if data.dtype.type is not np.float32 and data.dtype.type is not np.float64:
raise ValueError('Illegal data type. Depth images only support float arrays') # depends on [control=['if'], data=[]]
if len(data.shape) == 3 and data.shape[2] != 1:
raise ValueError('Illegal data type. Depth images only support single channel') # depends on [control=['if'], data=[]] |
def get_documents(self, term):
"""
Returns all documents related to the specified term in the
form of a Counter object.
"""
if term not in self._terms:
raise IndexError(TERM_DOES_NOT_EXIST)
else:
return self._terms[term] | def function[get_documents, parameter[self, term]]:
constant[
Returns all documents related to the specified term in the
form of a Counter object.
]
if compare[name[term] <ast.NotIn object at 0x7da2590d7190> name[self]._terms] begin[:]
<ast.Raise object at 0x7da1b0f45420> | keyword[def] identifier[get_documents] ( identifier[self] , identifier[term] ):
literal[string]
keyword[if] identifier[term] keyword[not] keyword[in] identifier[self] . identifier[_terms] :
keyword[raise] identifier[IndexError] ( identifier[TERM_DOES_NOT_EXIST] )
keyword[else] :
keyword[return] identifier[self] . identifier[_terms] [ identifier[term] ] | def get_documents(self, term):
"""
Returns all documents related to the specified term in the
form of a Counter object.
"""
if term not in self._terms:
raise IndexError(TERM_DOES_NOT_EXIST) # depends on [control=['if'], data=[]]
else:
return self._terms[term] |
def WriteClientCrashInfo(self, client_id, crash_info):
"""Writes a new client crash record."""
if client_id not in self.metadatas:
raise db.UnknownClientError(client_id)
ts = rdfvalue.RDFDatetime.Now()
self.metadatas[client_id]["last_crash_timestamp"] = ts
history = self.crash_history.setdefault(client_id, {})
history[ts] = crash_info.SerializeToString() | def function[WriteClientCrashInfo, parameter[self, client_id, crash_info]]:
constant[Writes a new client crash record.]
if compare[name[client_id] <ast.NotIn object at 0x7da2590d7190> name[self].metadatas] begin[:]
<ast.Raise object at 0x7da1b1d91ab0>
variable[ts] assign[=] call[name[rdfvalue].RDFDatetime.Now, parameter[]]
call[call[name[self].metadatas][name[client_id]]][constant[last_crash_timestamp]] assign[=] name[ts]
variable[history] assign[=] call[name[self].crash_history.setdefault, parameter[name[client_id], dictionary[[], []]]]
call[name[history]][name[ts]] assign[=] call[name[crash_info].SerializeToString, parameter[]] | keyword[def] identifier[WriteClientCrashInfo] ( identifier[self] , identifier[client_id] , identifier[crash_info] ):
literal[string]
keyword[if] identifier[client_id] keyword[not] keyword[in] identifier[self] . identifier[metadatas] :
keyword[raise] identifier[db] . identifier[UnknownClientError] ( identifier[client_id] )
identifier[ts] = identifier[rdfvalue] . identifier[RDFDatetime] . identifier[Now] ()
identifier[self] . identifier[metadatas] [ identifier[client_id] ][ literal[string] ]= identifier[ts]
identifier[history] = identifier[self] . identifier[crash_history] . identifier[setdefault] ( identifier[client_id] ,{})
identifier[history] [ identifier[ts] ]= identifier[crash_info] . identifier[SerializeToString] () | def WriteClientCrashInfo(self, client_id, crash_info):
"""Writes a new client crash record."""
if client_id not in self.metadatas:
raise db.UnknownClientError(client_id) # depends on [control=['if'], data=['client_id']]
ts = rdfvalue.RDFDatetime.Now()
self.metadatas[client_id]['last_crash_timestamp'] = ts
history = self.crash_history.setdefault(client_id, {})
history[ts] = crash_info.SerializeToString() |
def upload(ctx):
"""Upload files to Azure Storage"""
settings.add_cli_options(ctx.cli_options, settings.TransferAction.Upload)
ctx.initialize(settings.TransferAction.Upload)
specs = settings.create_upload_specifications(
ctx.cli_options, ctx.config)
del ctx.cli_options
for spec in specs:
blobxfer.api.Uploader(
ctx.general_options, ctx.credentials, spec
).start() | def function[upload, parameter[ctx]]:
constant[Upload files to Azure Storage]
call[name[settings].add_cli_options, parameter[name[ctx].cli_options, name[settings].TransferAction.Upload]]
call[name[ctx].initialize, parameter[name[settings].TransferAction.Upload]]
variable[specs] assign[=] call[name[settings].create_upload_specifications, parameter[name[ctx].cli_options, name[ctx].config]]
<ast.Delete object at 0x7da20e9b2650>
for taget[name[spec]] in starred[name[specs]] begin[:]
call[call[name[blobxfer].api.Uploader, parameter[name[ctx].general_options, name[ctx].credentials, name[spec]]].start, parameter[]] | keyword[def] identifier[upload] ( identifier[ctx] ):
literal[string]
identifier[settings] . identifier[add_cli_options] ( identifier[ctx] . identifier[cli_options] , identifier[settings] . identifier[TransferAction] . identifier[Upload] )
identifier[ctx] . identifier[initialize] ( identifier[settings] . identifier[TransferAction] . identifier[Upload] )
identifier[specs] = identifier[settings] . identifier[create_upload_specifications] (
identifier[ctx] . identifier[cli_options] , identifier[ctx] . identifier[config] )
keyword[del] identifier[ctx] . identifier[cli_options]
keyword[for] identifier[spec] keyword[in] identifier[specs] :
identifier[blobxfer] . identifier[api] . identifier[Uploader] (
identifier[ctx] . identifier[general_options] , identifier[ctx] . identifier[credentials] , identifier[spec]
). identifier[start] () | def upload(ctx):
"""Upload files to Azure Storage"""
settings.add_cli_options(ctx.cli_options, settings.TransferAction.Upload)
ctx.initialize(settings.TransferAction.Upload)
specs = settings.create_upload_specifications(ctx.cli_options, ctx.config)
del ctx.cli_options
for spec in specs:
blobxfer.api.Uploader(ctx.general_options, ctx.credentials, spec).start() # depends on [control=['for'], data=['spec']] |
def convex_hull(self):
"""Region representing the convex hull.
:returns: :class:`jicimagelib.region.Region`
"""
hull_array = skimage.morphology.convex_hull_image(self.bitmap)
return Region(hull_array) | def function[convex_hull, parameter[self]]:
constant[Region representing the convex hull.
:returns: :class:`jicimagelib.region.Region`
]
variable[hull_array] assign[=] call[name[skimage].morphology.convex_hull_image, parameter[name[self].bitmap]]
return[call[name[Region], parameter[name[hull_array]]]] | keyword[def] identifier[convex_hull] ( identifier[self] ):
literal[string]
identifier[hull_array] = identifier[skimage] . identifier[morphology] . identifier[convex_hull_image] ( identifier[self] . identifier[bitmap] )
keyword[return] identifier[Region] ( identifier[hull_array] ) | def convex_hull(self):
"""Region representing the convex hull.
:returns: :class:`jicimagelib.region.Region`
"""
hull_array = skimage.morphology.convex_hull_image(self.bitmap)
return Region(hull_array) |
def from_Z(z: int):
"""
Get an element from an atomic number.
Args:
z (int): Atomic number
Returns:
Element with atomic number z.
"""
for sym, data in _pt_data.items():
if data["Atomic no"] == z:
return Element(sym)
raise ValueError("No element with this atomic number %s" % z) | def function[from_Z, parameter[z]]:
constant[
Get an element from an atomic number.
Args:
z (int): Atomic number
Returns:
Element with atomic number z.
]
for taget[tuple[[<ast.Name object at 0x7da204565210>, <ast.Name object at 0x7da204564040>]]] in starred[call[name[_pt_data].items, parameter[]]] begin[:]
if compare[call[name[data]][constant[Atomic no]] equal[==] name[z]] begin[:]
return[call[name[Element], parameter[name[sym]]]]
<ast.Raise object at 0x7da2045660b0> | keyword[def] identifier[from_Z] ( identifier[z] : identifier[int] ):
literal[string]
keyword[for] identifier[sym] , identifier[data] keyword[in] identifier[_pt_data] . identifier[items] ():
keyword[if] identifier[data] [ literal[string] ]== identifier[z] :
keyword[return] identifier[Element] ( identifier[sym] )
keyword[raise] identifier[ValueError] ( literal[string] % identifier[z] ) | def from_Z(z: int):
"""
Get an element from an atomic number.
Args:
z (int): Atomic number
Returns:
Element with atomic number z.
"""
for (sym, data) in _pt_data.items():
if data['Atomic no'] == z:
return Element(sym) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
raise ValueError('No element with this atomic number %s' % z) |
def SampleStart(self):
"""Starts measuring the CPU time."""
self._start_cpu_time = time.clock()
self.start_sample_time = time.time()
self.total_cpu_time = 0 | def function[SampleStart, parameter[self]]:
constant[Starts measuring the CPU time.]
name[self]._start_cpu_time assign[=] call[name[time].clock, parameter[]]
name[self].start_sample_time assign[=] call[name[time].time, parameter[]]
name[self].total_cpu_time assign[=] constant[0] | keyword[def] identifier[SampleStart] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_start_cpu_time] = identifier[time] . identifier[clock] ()
identifier[self] . identifier[start_sample_time] = identifier[time] . identifier[time] ()
identifier[self] . identifier[total_cpu_time] = literal[int] | def SampleStart(self):
"""Starts measuring the CPU time."""
self._start_cpu_time = time.clock()
self.start_sample_time = time.time()
self.total_cpu_time = 0 |
def company(anon, obj, field, val):
"""
Generates a random company name
"""
return anon.faker.company(field=field) | def function[company, parameter[anon, obj, field, val]]:
constant[
Generates a random company name
]
return[call[name[anon].faker.company, parameter[]]] | keyword[def] identifier[company] ( identifier[anon] , identifier[obj] , identifier[field] , identifier[val] ):
literal[string]
keyword[return] identifier[anon] . identifier[faker] . identifier[company] ( identifier[field] = identifier[field] ) | def company(anon, obj, field, val):
"""
Generates a random company name
"""
return anon.faker.company(field=field) |
def create_config():
"""Initiates process to generate configuration file."""
file_exists = exists(CONFIG_FILE)
if not file_exists:
# Set directories
config.add_section('Directories')
defaults = [
('AbstractRetrieval', expanduser('~/.scopus/abstract_retrieval')),
('AffiliationSearch', expanduser('~/.scopus/affiliation_search')),
('AuthorRetrieval', expanduser('~/.scopus/author_retrieval')),
('AuthorSearch', expanduser('~/.scopus/author_search')),
('CitationOverview', expanduser('~/.scopus/citation_overview')),
('ContentAffiliationRetrieval', expanduser('~/.scopus/affiliation_retrieval')),
('ScopusSearch', expanduser('~/.scopus/scopus_search'))
]
for key, value in defaults:
config.set('Directories', key, value)
if not exists(value):
makedirs(value)
# Set authentication
config.add_section('Authentication')
prompt_key = "Please enter your API Key, obtained from "\
"http://dev.elsevier.com/myapikey.html: \n"
if py3:
key = input(prompt_key)
else:
key = raw_input(prompt_key)
config.set('Authentication', 'APIKey', key)
prompt_token = "API Keys are sufficient for most users. If you "\
"have to use Authtoken authentication, please enter "\
"the token, otherwise press Enter: \n"
if py3:
token = input(prompt_token)
else:
token = raw_input(prompt_token)
if len(token) > 0:
config.set('Authentication', 'InstToken', token)
# Write out
with open(CONFIG_FILE, 'w') as f:
config.write(f)
else:
text = "Configuration file already exists at {}; process to create "\
"the file aborted. Please open the file and edit the "\
"entries manually.".format(CONFIG_FILE)
raise FileExistsError(text) | def function[create_config, parameter[]]:
constant[Initiates process to generate configuration file.]
variable[file_exists] assign[=] call[name[exists], parameter[name[CONFIG_FILE]]]
if <ast.UnaryOp object at 0x7da1b2347490> begin[:]
call[name[config].add_section, parameter[constant[Directories]]]
variable[defaults] assign[=] list[[<ast.Tuple object at 0x7da1b23472e0>, <ast.Tuple object at 0x7da1b2344430>, <ast.Tuple object at 0x7da1b2346e90>, <ast.Tuple object at 0x7da1b2346350>, <ast.Tuple object at 0x7da1b2345f90>, <ast.Tuple object at 0x7da1b2345360>, <ast.Tuple object at 0x7da1b2345c90>]]
for taget[tuple[[<ast.Name object at 0x7da1b2347ac0>, <ast.Name object at 0x7da1b23468f0>]]] in starred[name[defaults]] begin[:]
call[name[config].set, parameter[constant[Directories], name[key], name[value]]]
if <ast.UnaryOp object at 0x7da1b2344a90> begin[:]
call[name[makedirs], parameter[name[value]]]
call[name[config].add_section, parameter[constant[Authentication]]]
variable[prompt_key] assign[=] constant[Please enter your API Key, obtained from http://dev.elsevier.com/myapikey.html:
]
if name[py3] begin[:]
variable[key] assign[=] call[name[input], parameter[name[prompt_key]]]
call[name[config].set, parameter[constant[Authentication], constant[APIKey], name[key]]]
variable[prompt_token] assign[=] constant[API Keys are sufficient for most users. If you have to use Authtoken authentication, please enter the token, otherwise press Enter:
]
if name[py3] begin[:]
variable[token] assign[=] call[name[input], parameter[name[prompt_token]]]
if compare[call[name[len], parameter[name[token]]] greater[>] constant[0]] begin[:]
call[name[config].set, parameter[constant[Authentication], constant[InstToken], name[token]]]
with call[name[open], parameter[name[CONFIG_FILE], constant[w]]] begin[:]
call[name[config].write, parameter[name[f]]] | keyword[def] identifier[create_config] ():
literal[string]
identifier[file_exists] = identifier[exists] ( identifier[CONFIG_FILE] )
keyword[if] keyword[not] identifier[file_exists] :
identifier[config] . identifier[add_section] ( literal[string] )
identifier[defaults] =[
( literal[string] , identifier[expanduser] ( literal[string] )),
( literal[string] , identifier[expanduser] ( literal[string] )),
( literal[string] , identifier[expanduser] ( literal[string] )),
( literal[string] , identifier[expanduser] ( literal[string] )),
( literal[string] , identifier[expanduser] ( literal[string] )),
( literal[string] , identifier[expanduser] ( literal[string] )),
( literal[string] , identifier[expanduser] ( literal[string] ))
]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[defaults] :
identifier[config] . identifier[set] ( literal[string] , identifier[key] , identifier[value] )
keyword[if] keyword[not] identifier[exists] ( identifier[value] ):
identifier[makedirs] ( identifier[value] )
identifier[config] . identifier[add_section] ( literal[string] )
identifier[prompt_key] = literal[string] literal[string]
keyword[if] identifier[py3] :
identifier[key] = identifier[input] ( identifier[prompt_key] )
keyword[else] :
identifier[key] = identifier[raw_input] ( identifier[prompt_key] )
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[key] )
identifier[prompt_token] = literal[string] literal[string] literal[string]
keyword[if] identifier[py3] :
identifier[token] = identifier[input] ( identifier[prompt_token] )
keyword[else] :
identifier[token] = identifier[raw_input] ( identifier[prompt_token] )
keyword[if] identifier[len] ( identifier[token] )> literal[int] :
identifier[config] . identifier[set] ( literal[string] , literal[string] , identifier[token] )
keyword[with] identifier[open] ( identifier[CONFIG_FILE] , literal[string] ) keyword[as] identifier[f] :
identifier[config] . identifier[write] ( identifier[f] )
keyword[else] :
identifier[text] = literal[string] literal[string] literal[string] . identifier[format] ( identifier[CONFIG_FILE] )
keyword[raise] identifier[FileExistsError] ( identifier[text] ) | def create_config():
"""Initiates process to generate configuration file."""
file_exists = exists(CONFIG_FILE)
if not file_exists:
# Set directories
config.add_section('Directories')
defaults = [('AbstractRetrieval', expanduser('~/.scopus/abstract_retrieval')), ('AffiliationSearch', expanduser('~/.scopus/affiliation_search')), ('AuthorRetrieval', expanduser('~/.scopus/author_retrieval')), ('AuthorSearch', expanduser('~/.scopus/author_search')), ('CitationOverview', expanduser('~/.scopus/citation_overview')), ('ContentAffiliationRetrieval', expanduser('~/.scopus/affiliation_retrieval')), ('ScopusSearch', expanduser('~/.scopus/scopus_search'))]
for (key, value) in defaults:
config.set('Directories', key, value)
if not exists(value):
makedirs(value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# Set authentication
config.add_section('Authentication')
prompt_key = 'Please enter your API Key, obtained from http://dev.elsevier.com/myapikey.html: \n'
if py3:
key = input(prompt_key) # depends on [control=['if'], data=[]]
else:
key = raw_input(prompt_key)
config.set('Authentication', 'APIKey', key)
prompt_token = 'API Keys are sufficient for most users. If you have to use Authtoken authentication, please enter the token, otherwise press Enter: \n'
if py3:
token = input(prompt_token) # depends on [control=['if'], data=[]]
else:
token = raw_input(prompt_token)
if len(token) > 0:
config.set('Authentication', 'InstToken', token) # depends on [control=['if'], data=[]]
# Write out
with open(CONFIG_FILE, 'w') as f:
config.write(f) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
else:
text = 'Configuration file already exists at {}; process to create the file aborted. Please open the file and edit the entries manually.'.format(CONFIG_FILE)
raise FileExistsError(text) |
def get_charset(html):
"""
get charset in html page
return : default if not exist `charset`
"""
regex = r'charset=([a-zA-Z0-9-]+)?'
pattern = re.compile(regex, re.IGNORECASE)
if len(pattern.findall(html)) == 0:
return 'UTF-8'
return pattern.findall(html)[0] | def function[get_charset, parameter[html]]:
constant[
get charset in html page
return : default if not exist `charset`
]
variable[regex] assign[=] constant[charset=([a-zA-Z0-9-]+)?]
variable[pattern] assign[=] call[name[re].compile, parameter[name[regex], name[re].IGNORECASE]]
if compare[call[name[len], parameter[call[name[pattern].findall, parameter[name[html]]]]] equal[==] constant[0]] begin[:]
return[constant[UTF-8]]
return[call[call[name[pattern].findall, parameter[name[html]]]][constant[0]]] | keyword[def] identifier[get_charset] ( identifier[html] ):
literal[string]
identifier[regex] = literal[string]
identifier[pattern] = identifier[re] . identifier[compile] ( identifier[regex] , identifier[re] . identifier[IGNORECASE] )
keyword[if] identifier[len] ( identifier[pattern] . identifier[findall] ( identifier[html] ))== literal[int] :
keyword[return] literal[string]
keyword[return] identifier[pattern] . identifier[findall] ( identifier[html] )[ literal[int] ] | def get_charset(html):
"""
get charset in html page
return : default if not exist `charset`
"""
regex = 'charset=([a-zA-Z0-9-]+)?'
pattern = re.compile(regex, re.IGNORECASE)
if len(pattern.findall(html)) == 0:
return 'UTF-8' # depends on [control=['if'], data=[]]
return pattern.findall(html)[0] |
def exec_command_on_nodes(nodes, cmd, label, conn_params=None):
"""Execute a command on a node (id or hostname) or on a set of nodes.
:param nodes: list of targets of the command cmd. Each must be an
execo.Host.
:param cmd: string representing the command to run on the
remote nodes.
:param label: string for debugging purpose.
:param conn_params: connection parameters passed to the execo.Remote
function
"""
if isinstance(nodes, BASESTRING):
nodes = [nodes]
if conn_params is None:
conn_params = DEFAULT_CONN_PARAMS
logger.debug("Running %s on %s ", label, nodes)
remote = ex.get_remote(cmd, nodes, conn_params)
remote.run()
if not remote.finished_ok:
raise Exception('An error occcured during remote execution')
return remote | def function[exec_command_on_nodes, parameter[nodes, cmd, label, conn_params]]:
constant[Execute a command on a node (id or hostname) or on a set of nodes.
:param nodes: list of targets of the command cmd. Each must be an
execo.Host.
:param cmd: string representing the command to run on the
remote nodes.
:param label: string for debugging purpose.
:param conn_params: connection parameters passed to the execo.Remote
function
]
if call[name[isinstance], parameter[name[nodes], name[BASESTRING]]] begin[:]
variable[nodes] assign[=] list[[<ast.Name object at 0x7da207f9ad40>]]
if compare[name[conn_params] is constant[None]] begin[:]
variable[conn_params] assign[=] name[DEFAULT_CONN_PARAMS]
call[name[logger].debug, parameter[constant[Running %s on %s ], name[label], name[nodes]]]
variable[remote] assign[=] call[name[ex].get_remote, parameter[name[cmd], name[nodes], name[conn_params]]]
call[name[remote].run, parameter[]]
if <ast.UnaryOp object at 0x7da207f9baf0> begin[:]
<ast.Raise object at 0x7da207f98220>
return[name[remote]] | keyword[def] identifier[exec_command_on_nodes] ( identifier[nodes] , identifier[cmd] , identifier[label] , identifier[conn_params] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[nodes] , identifier[BASESTRING] ):
identifier[nodes] =[ identifier[nodes] ]
keyword[if] identifier[conn_params] keyword[is] keyword[None] :
identifier[conn_params] = identifier[DEFAULT_CONN_PARAMS]
identifier[logger] . identifier[debug] ( literal[string] , identifier[label] , identifier[nodes] )
identifier[remote] = identifier[ex] . identifier[get_remote] ( identifier[cmd] , identifier[nodes] , identifier[conn_params] )
identifier[remote] . identifier[run] ()
keyword[if] keyword[not] identifier[remote] . identifier[finished_ok] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[return] identifier[remote] | def exec_command_on_nodes(nodes, cmd, label, conn_params=None):
"""Execute a command on a node (id or hostname) or on a set of nodes.
:param nodes: list of targets of the command cmd. Each must be an
execo.Host.
:param cmd: string representing the command to run on the
remote nodes.
:param label: string for debugging purpose.
:param conn_params: connection parameters passed to the execo.Remote
function
"""
if isinstance(nodes, BASESTRING):
nodes = [nodes] # depends on [control=['if'], data=[]]
if conn_params is None:
conn_params = DEFAULT_CONN_PARAMS # depends on [control=['if'], data=['conn_params']]
logger.debug('Running %s on %s ', label, nodes)
remote = ex.get_remote(cmd, nodes, conn_params)
remote.run()
if not remote.finished_ok:
raise Exception('An error occcured during remote execution') # depends on [control=['if'], data=[]]
return remote |
def get_resource(self, resource_key, repository_type, location,
**variables):
"""Get a resource.
Attempts to get and return a cached version of the resource if
available, otherwise a new resource object is created and returned.
Args:
resource_key (`str`): Name of the type of `Resources` to find
repository_type (`str`): What sort of repository to look for the
resource in
location (`str`): location for the repository
variables: data to identify / store on the resource
Returns:
`PackageRepositoryResource` instance.
"""
path = "%s@%s" % (repository_type, location)
repo = self.get_repository(path)
resource = repo.get_resource(**variables)
return resource | def function[get_resource, parameter[self, resource_key, repository_type, location]]:
constant[Get a resource.
Attempts to get and return a cached version of the resource if
available, otherwise a new resource object is created and returned.
Args:
resource_key (`str`): Name of the type of `Resources` to find
repository_type (`str`): What sort of repository to look for the
resource in
location (`str`): location for the repository
variables: data to identify / store on the resource
Returns:
`PackageRepositoryResource` instance.
]
variable[path] assign[=] binary_operation[constant[%s@%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b175d030>, <ast.Name object at 0x7da1b175fd30>]]]
variable[repo] assign[=] call[name[self].get_repository, parameter[name[path]]]
variable[resource] assign[=] call[name[repo].get_resource, parameter[]]
return[name[resource]] | keyword[def] identifier[get_resource] ( identifier[self] , identifier[resource_key] , identifier[repository_type] , identifier[location] ,
** identifier[variables] ):
literal[string]
identifier[path] = literal[string] %( identifier[repository_type] , identifier[location] )
identifier[repo] = identifier[self] . identifier[get_repository] ( identifier[path] )
identifier[resource] = identifier[repo] . identifier[get_resource] (** identifier[variables] )
keyword[return] identifier[resource] | def get_resource(self, resource_key, repository_type, location, **variables):
"""Get a resource.
Attempts to get and return a cached version of the resource if
available, otherwise a new resource object is created and returned.
Args:
resource_key (`str`): Name of the type of `Resources` to find
repository_type (`str`): What sort of repository to look for the
resource in
location (`str`): location for the repository
variables: data to identify / store on the resource
Returns:
`PackageRepositoryResource` instance.
"""
path = '%s@%s' % (repository_type, location)
repo = self.get_repository(path)
resource = repo.get_resource(**variables)
return resource |
def get_user(self, nick: str) -> User:
"""
:param nick: nick or prefix
"""
hostmask = None
if "!" in nick:
nick, _, hostmask = nick.partition("!")
user = self._users.get(nick)
if not user:
self._users[nick] = user = User(nick, self, hostmask=hostmask)
elif not user.hostmask:
user.hostmask = hostmask
return user | def function[get_user, parameter[self, nick]]:
constant[
:param nick: nick or prefix
]
variable[hostmask] assign[=] constant[None]
if compare[constant[!] in name[nick]] begin[:]
<ast.Tuple object at 0x7da2054a5d50> assign[=] call[name[nick].partition, parameter[constant[!]]]
variable[user] assign[=] call[name[self]._users.get, parameter[name[nick]]]
if <ast.UnaryOp object at 0x7da2054a7880> begin[:]
call[name[self]._users][name[nick]] assign[=] call[name[User], parameter[name[nick], name[self]]]
return[name[user]] | keyword[def] identifier[get_user] ( identifier[self] , identifier[nick] : identifier[str] )-> identifier[User] :
literal[string]
identifier[hostmask] = keyword[None]
keyword[if] literal[string] keyword[in] identifier[nick] :
identifier[nick] , identifier[_] , identifier[hostmask] = identifier[nick] . identifier[partition] ( literal[string] )
identifier[user] = identifier[self] . identifier[_users] . identifier[get] ( identifier[nick] )
keyword[if] keyword[not] identifier[user] :
identifier[self] . identifier[_users] [ identifier[nick] ]= identifier[user] = identifier[User] ( identifier[nick] , identifier[self] , identifier[hostmask] = identifier[hostmask] )
keyword[elif] keyword[not] identifier[user] . identifier[hostmask] :
identifier[user] . identifier[hostmask] = identifier[hostmask]
keyword[return] identifier[user] | def get_user(self, nick: str) -> User:
"""
:param nick: nick or prefix
"""
hostmask = None
if '!' in nick:
(nick, _, hostmask) = nick.partition('!') # depends on [control=['if'], data=['nick']]
user = self._users.get(nick)
if not user:
self._users[nick] = user = User(nick, self, hostmask=hostmask) # depends on [control=['if'], data=[]]
elif not user.hostmask:
user.hostmask = hostmask # depends on [control=['if'], data=[]]
return user |
def construct(cls, name: str, declared_fields: typing.List[tuple]):
"""
Utility method packaged along with the factory to be able to construct Request Object
classes on the fly.
Example:
.. code-block:: python
UserShowRequestObject = Factory.create_request_object(
'CreateRequestObject',
[('identifier', int, {'required': True}),
('name', str, {'required': True}),
('desc', str, {'default': 'Blah'})])
And then create a request object like so:
.. code-block:: python
request_object = UserShowRequestObject.from_dict(
{'identifier': 112,
'name': 'Jane',
'desc': "Doer is not Doe"})
The third tuple element is a `dict` of the form: {'required': True, 'default': 'John'}
* ``required`` is False by default, so ``{required: False, default: 'John'}`` and \
``{default: 'John'}`` evaluate to the same field definition
* ``default`` is a *concrete* value of the correct type
"""
# FIXME Refactor this method to make it simpler
@classmethod
def from_dict(cls, adict):
"""Validate and initialize a Request Object"""
invalid_req = InvalidRequestObject()
values = {}
for item in fields(cls):
value = None
if item.metadata and 'required' in item.metadata and item.metadata['required']:
if item.name not in adict or adict.get(item.name) is None:
invalid_req.add_error(item.name, 'is required')
else:
value = adict[item.name]
elif item.name in adict:
value = adict[item.name]
elif item.default:
value = item.default
try:
if item.type not in [typing.Any, 'typing.Any'] and value is not None:
if item.type in [int, float, str, bool, list, dict, tuple,
datetime.date, datetime.datetime]:
value = item.type(value)
else:
if not (isinstance(value, item.type) or issubclass(value, item.type)):
invalid_req.add_error(
item.name,
'{} should be of type {}'.format(item.name, item.type))
except Exception:
invalid_req.add_error(
item.name,
'Value {} for {} is invalid'.format(value, item.name))
values[item.name] = value
# Return errors, if any, instead of a request object
if invalid_req.has_errors:
return invalid_req
# Return the initialized Request Object instance
return cls(**values)
formatted_fields = cls._format_fields(declared_fields)
dc = make_dataclass(name, formatted_fields,
namespace={'from_dict': from_dict, 'is_valid': True})
return dc | def function[construct, parameter[cls, name, declared_fields]]:
constant[
Utility method packaged along with the factory to be able to construct Request Object
classes on the fly.
Example:
.. code-block:: python
UserShowRequestObject = Factory.create_request_object(
'CreateRequestObject',
[('identifier', int, {'required': True}),
('name', str, {'required': True}),
('desc', str, {'default': 'Blah'})])
And then create a request object like so:
.. code-block:: python
request_object = UserShowRequestObject.from_dict(
{'identifier': 112,
'name': 'Jane',
'desc': "Doer is not Doe"})
The third tuple element is a `dict` of the form: {'required': True, 'default': 'John'}
* ``required`` is False by default, so ``{required: False, default: 'John'}`` and ``{default: 'John'}`` evaluate to the same field definition
* ``default`` is a *concrete* value of the correct type
]
def function[from_dict, parameter[cls, adict]]:
constant[Validate and initialize a Request Object]
variable[invalid_req] assign[=] call[name[InvalidRequestObject], parameter[]]
variable[values] assign[=] dictionary[[], []]
for taget[name[item]] in starred[call[name[fields], parameter[name[cls]]]] begin[:]
variable[value] assign[=] constant[None]
if <ast.BoolOp object at 0x7da2041d94e0> begin[:]
if <ast.BoolOp object at 0x7da18ede5b40> begin[:]
call[name[invalid_req].add_error, parameter[name[item].name, constant[is required]]]
<ast.Try object at 0x7da1b1b0f130>
call[name[values]][name[item].name] assign[=] name[value]
if name[invalid_req].has_errors begin[:]
return[name[invalid_req]]
return[call[name[cls], parameter[]]]
variable[formatted_fields] assign[=] call[name[cls]._format_fields, parameter[name[declared_fields]]]
variable[dc] assign[=] call[name[make_dataclass], parameter[name[name], name[formatted_fields]]]
return[name[dc]] | keyword[def] identifier[construct] ( identifier[cls] , identifier[name] : identifier[str] , identifier[declared_fields] : identifier[typing] . identifier[List] [ identifier[tuple] ]):
literal[string]
@ identifier[classmethod]
keyword[def] identifier[from_dict] ( identifier[cls] , identifier[adict] ):
literal[string]
identifier[invalid_req] = identifier[InvalidRequestObject] ()
identifier[values] ={}
keyword[for] identifier[item] keyword[in] identifier[fields] ( identifier[cls] ):
identifier[value] = keyword[None]
keyword[if] identifier[item] . identifier[metadata] keyword[and] literal[string] keyword[in] identifier[item] . identifier[metadata] keyword[and] identifier[item] . identifier[metadata] [ literal[string] ]:
keyword[if] identifier[item] . identifier[name] keyword[not] keyword[in] identifier[adict] keyword[or] identifier[adict] . identifier[get] ( identifier[item] . identifier[name] ) keyword[is] keyword[None] :
identifier[invalid_req] . identifier[add_error] ( identifier[item] . identifier[name] , literal[string] )
keyword[else] :
identifier[value] = identifier[adict] [ identifier[item] . identifier[name] ]
keyword[elif] identifier[item] . identifier[name] keyword[in] identifier[adict] :
identifier[value] = identifier[adict] [ identifier[item] . identifier[name] ]
keyword[elif] identifier[item] . identifier[default] :
identifier[value] = identifier[item] . identifier[default]
keyword[try] :
keyword[if] identifier[item] . identifier[type] keyword[not] keyword[in] [ identifier[typing] . identifier[Any] , literal[string] ] keyword[and] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[item] . identifier[type] keyword[in] [ identifier[int] , identifier[float] , identifier[str] , identifier[bool] , identifier[list] , identifier[dict] , identifier[tuple] ,
identifier[datetime] . identifier[date] , identifier[datetime] . identifier[datetime] ]:
identifier[value] = identifier[item] . identifier[type] ( identifier[value] )
keyword[else] :
keyword[if] keyword[not] ( identifier[isinstance] ( identifier[value] , identifier[item] . identifier[type] ) keyword[or] identifier[issubclass] ( identifier[value] , identifier[item] . identifier[type] )):
identifier[invalid_req] . identifier[add_error] (
identifier[item] . identifier[name] ,
literal[string] . identifier[format] ( identifier[item] . identifier[name] , identifier[item] . identifier[type] ))
keyword[except] identifier[Exception] :
identifier[invalid_req] . identifier[add_error] (
identifier[item] . identifier[name] ,
literal[string] . identifier[format] ( identifier[value] , identifier[item] . identifier[name] ))
identifier[values] [ identifier[item] . identifier[name] ]= identifier[value]
keyword[if] identifier[invalid_req] . identifier[has_errors] :
keyword[return] identifier[invalid_req]
keyword[return] identifier[cls] (** identifier[values] )
identifier[formatted_fields] = identifier[cls] . identifier[_format_fields] ( identifier[declared_fields] )
identifier[dc] = identifier[make_dataclass] ( identifier[name] , identifier[formatted_fields] ,
identifier[namespace] ={ literal[string] : identifier[from_dict] , literal[string] : keyword[True] })
keyword[return] identifier[dc] | def construct(cls, name: str, declared_fields: typing.List[tuple]):
"""
Utility method packaged along with the factory to be able to construct Request Object
classes on the fly.
Example:
.. code-block:: python
UserShowRequestObject = Factory.create_request_object(
'CreateRequestObject',
[('identifier', int, {'required': True}),
('name', str, {'required': True}),
('desc', str, {'default': 'Blah'})])
And then create a request object like so:
.. code-block:: python
request_object = UserShowRequestObject.from_dict(
{'identifier': 112,
'name': 'Jane',
'desc': "Doer is not Doe"})
The third tuple element is a `dict` of the form: {'required': True, 'default': 'John'}
* ``required`` is False by default, so ``{required: False, default: 'John'}`` and ``{default: 'John'}`` evaluate to the same field definition
* ``default`` is a *concrete* value of the correct type
"""
# FIXME Refactor this method to make it simpler
@classmethod
def from_dict(cls, adict):
"""Validate and initialize a Request Object"""
invalid_req = InvalidRequestObject()
values = {}
for item in fields(cls):
value = None
if item.metadata and 'required' in item.metadata and item.metadata['required']:
if item.name not in adict or adict.get(item.name) is None:
invalid_req.add_error(item.name, 'is required') # depends on [control=['if'], data=[]]
else:
value = adict[item.name] # depends on [control=['if'], data=[]]
elif item.name in adict:
value = adict[item.name] # depends on [control=['if'], data=['adict']]
elif item.default:
value = item.default # depends on [control=['if'], data=[]]
try:
if item.type not in [typing.Any, 'typing.Any'] and value is not None:
if item.type in [int, float, str, bool, list, dict, tuple, datetime.date, datetime.datetime]:
value = item.type(value) # depends on [control=['if'], data=[]]
elif not (isinstance(value, item.type) or issubclass(value, item.type)):
invalid_req.add_error(item.name, '{} should be of type {}'.format(item.name, item.type)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception:
invalid_req.add_error(item.name, 'Value {} for {} is invalid'.format(value, item.name)) # depends on [control=['except'], data=[]]
values[item.name] = value # depends on [control=['for'], data=['item']]
# Return errors, if any, instead of a request object
if invalid_req.has_errors:
return invalid_req # depends on [control=['if'], data=[]]
# Return the initialized Request Object instance
return cls(**values)
formatted_fields = cls._format_fields(declared_fields)
dc = make_dataclass(name, formatted_fields, namespace={'from_dict': from_dict, 'is_valid': True})
return dc |
def build_workspace_path(user_id, workflow_id=None):
"""Build user's workspace relative path.
:param user_id: Owner of the workspace.
:param workflow_id: Optional parameter, if provided gives the path to the
workflow workspace instead of just the path to the user workspace.
:return: String that represents the workspace relative path.
i.e. users/0000/workflows/0034
"""
workspace_path = os.path.join('users', str(user_id), 'workflows')
if workflow_id:
workspace_path = os.path.join(workspace_path, str(workflow_id))
return workspace_path | def function[build_workspace_path, parameter[user_id, workflow_id]]:
constant[Build user's workspace relative path.
:param user_id: Owner of the workspace.
:param workflow_id: Optional parameter, if provided gives the path to the
workflow workspace instead of just the path to the user workspace.
:return: String that represents the workspace relative path.
i.e. users/0000/workflows/0034
]
variable[workspace_path] assign[=] call[name[os].path.join, parameter[constant[users], call[name[str], parameter[name[user_id]]], constant[workflows]]]
if name[workflow_id] begin[:]
variable[workspace_path] assign[=] call[name[os].path.join, parameter[name[workspace_path], call[name[str], parameter[name[workflow_id]]]]]
return[name[workspace_path]] | keyword[def] identifier[build_workspace_path] ( identifier[user_id] , identifier[workflow_id] = keyword[None] ):
literal[string]
identifier[workspace_path] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , identifier[str] ( identifier[user_id] ), literal[string] )
keyword[if] identifier[workflow_id] :
identifier[workspace_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[workspace_path] , identifier[str] ( identifier[workflow_id] ))
keyword[return] identifier[workspace_path] | def build_workspace_path(user_id, workflow_id=None):
"""Build user's workspace relative path.
:param user_id: Owner of the workspace.
:param workflow_id: Optional parameter, if provided gives the path to the
workflow workspace instead of just the path to the user workspace.
:return: String that represents the workspace relative path.
i.e. users/0000/workflows/0034
"""
workspace_path = os.path.join('users', str(user_id), 'workflows')
if workflow_id:
workspace_path = os.path.join(workspace_path, str(workflow_id)) # depends on [control=['if'], data=[]]
return workspace_path |
def remove_child(self, child_pid):
"""Remove a child from a PID concept."""
with db.session.begin_nested():
if not isinstance(child_pid, PersistentIdentifier):
child_pid = resolve_pid(child_pid)
relation = PIDRelation.query.filter_by(
parent=self._resolved_pid,
child=child_pid,
relation_type=self.relation_type.id).one()
db.session.delete(relation) | def function[remove_child, parameter[self, child_pid]]:
constant[Remove a child from a PID concept.]
with call[name[db].session.begin_nested, parameter[]] begin[:]
if <ast.UnaryOp object at 0x7da2044c0a90> begin[:]
variable[child_pid] assign[=] call[name[resolve_pid], parameter[name[child_pid]]]
variable[relation] assign[=] call[call[name[PIDRelation].query.filter_by, parameter[]].one, parameter[]]
call[name[db].session.delete, parameter[name[relation]]] | keyword[def] identifier[remove_child] ( identifier[self] , identifier[child_pid] ):
literal[string]
keyword[with] identifier[db] . identifier[session] . identifier[begin_nested] ():
keyword[if] keyword[not] identifier[isinstance] ( identifier[child_pid] , identifier[PersistentIdentifier] ):
identifier[child_pid] = identifier[resolve_pid] ( identifier[child_pid] )
identifier[relation] = identifier[PIDRelation] . identifier[query] . identifier[filter_by] (
identifier[parent] = identifier[self] . identifier[_resolved_pid] ,
identifier[child] = identifier[child_pid] ,
identifier[relation_type] = identifier[self] . identifier[relation_type] . identifier[id] ). identifier[one] ()
identifier[db] . identifier[session] . identifier[delete] ( identifier[relation] ) | def remove_child(self, child_pid):
"""Remove a child from a PID concept."""
with db.session.begin_nested():
if not isinstance(child_pid, PersistentIdentifier):
child_pid = resolve_pid(child_pid) # depends on [control=['if'], data=[]]
relation = PIDRelation.query.filter_by(parent=self._resolved_pid, child=child_pid, relation_type=self.relation_type.id).one()
db.session.delete(relation) # depends on [control=['with'], data=[]] |
def gcm_send_bulk_message(registration_ids, data, encoding='utf-8', **kwargs):
"""
Standalone method to send bulk gcm notifications
"""
messenger = GCMMessenger(registration_ids, data, encoding=encoding, **kwargs)
return messenger.send_bulk() | def function[gcm_send_bulk_message, parameter[registration_ids, data, encoding]]:
constant[
Standalone method to send bulk gcm notifications
]
variable[messenger] assign[=] call[name[GCMMessenger], parameter[name[registration_ids], name[data]]]
return[call[name[messenger].send_bulk, parameter[]]] | keyword[def] identifier[gcm_send_bulk_message] ( identifier[registration_ids] , identifier[data] , identifier[encoding] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[messenger] = identifier[GCMMessenger] ( identifier[registration_ids] , identifier[data] , identifier[encoding] = identifier[encoding] ,** identifier[kwargs] )
keyword[return] identifier[messenger] . identifier[send_bulk] () | def gcm_send_bulk_message(registration_ids, data, encoding='utf-8', **kwargs):
"""
Standalone method to send bulk gcm notifications
"""
messenger = GCMMessenger(registration_ids, data, encoding=encoding, **kwargs)
return messenger.send_bulk() |
def _validate_roles(model):
"""Given the model, check that all the metadata role values
have valid information in them and any required metadata fields
contain values.
"""
required_roles = (ATTRIBUTED_ROLE_KEYS[0], ATTRIBUTED_ROLE_KEYS[4],)
for role_key in ATTRIBUTED_ROLE_KEYS:
try:
roles = model.metadata[role_key]
except KeyError:
if role_key in required_roles:
raise exceptions.MissingRequiredMetadata(role_key)
else:
if role_key in required_roles and len(roles) == 0:
raise exceptions.MissingRequiredMetadata(role_key)
for role in roles:
if role.get('type') != 'cnx-id':
raise exceptions.InvalidRole(role_key, role) | def function[_validate_roles, parameter[model]]:
constant[Given the model, check that all the metadata role values
have valid information in them and any required metadata fields
contain values.
]
variable[required_roles] assign[=] tuple[[<ast.Subscript object at 0x7da18ede4e80>, <ast.Subscript object at 0x7da18ede5000>]]
for taget[name[role_key]] in starred[name[ATTRIBUTED_ROLE_KEYS]] begin[:]
<ast.Try object at 0x7da18ede6bf0>
for taget[name[role]] in starred[name[roles]] begin[:]
if compare[call[name[role].get, parameter[constant[type]]] not_equal[!=] constant[cnx-id]] begin[:]
<ast.Raise object at 0x7da20c6e55a0> | keyword[def] identifier[_validate_roles] ( identifier[model] ):
literal[string]
identifier[required_roles] =( identifier[ATTRIBUTED_ROLE_KEYS] [ literal[int] ], identifier[ATTRIBUTED_ROLE_KEYS] [ literal[int] ],)
keyword[for] identifier[role_key] keyword[in] identifier[ATTRIBUTED_ROLE_KEYS] :
keyword[try] :
identifier[roles] = identifier[model] . identifier[metadata] [ identifier[role_key] ]
keyword[except] identifier[KeyError] :
keyword[if] identifier[role_key] keyword[in] identifier[required_roles] :
keyword[raise] identifier[exceptions] . identifier[MissingRequiredMetadata] ( identifier[role_key] )
keyword[else] :
keyword[if] identifier[role_key] keyword[in] identifier[required_roles] keyword[and] identifier[len] ( identifier[roles] )== literal[int] :
keyword[raise] identifier[exceptions] . identifier[MissingRequiredMetadata] ( identifier[role_key] )
keyword[for] identifier[role] keyword[in] identifier[roles] :
keyword[if] identifier[role] . identifier[get] ( literal[string] )!= literal[string] :
keyword[raise] identifier[exceptions] . identifier[InvalidRole] ( identifier[role_key] , identifier[role] ) | def _validate_roles(model):
"""Given the model, check that all the metadata role values
have valid information in them and any required metadata fields
contain values.
"""
required_roles = (ATTRIBUTED_ROLE_KEYS[0], ATTRIBUTED_ROLE_KEYS[4])
for role_key in ATTRIBUTED_ROLE_KEYS:
try:
roles = model.metadata[role_key] # depends on [control=['try'], data=[]]
except KeyError:
if role_key in required_roles:
raise exceptions.MissingRequiredMetadata(role_key) # depends on [control=['if'], data=['role_key']] # depends on [control=['except'], data=[]]
else:
if role_key in required_roles and len(roles) == 0:
raise exceptions.MissingRequiredMetadata(role_key) # depends on [control=['if'], data=[]]
for role in roles:
if role.get('type') != 'cnx-id':
raise exceptions.InvalidRole(role_key, role) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['role']] # depends on [control=['for'], data=['role_key']] |
def _check_valid_cf_units(self, ds, variable_name):
'''
Checks that the variable contains units attribute, the attribute is a
string and the value is not deprecated by CF
:param netCDF4.Dataset ds: An open netCDF dataset
:param str variable_name: Name of the variable to be checked
:rtype:
:return: List of results
'''
# This list is straight from section 3
deprecated = ['level', 'layer', 'sigma_level']
variable = ds.variables[variable_name]
units = getattr(variable, 'units', None)
standard_name_full = getattr(variable, 'standard_name', None)
standard_name, standard_name_modifier = self._split_standard_name(standard_name_full)
std_name_units_dimensionless = cfutil.is_dimensionless_standard_name(self._std_names._root,
standard_name)
# Is this even in the database? also, if there is no standard_name,
# there's no way to know if it is dimensionless.
should_be_dimensionless = (variable.dtype.char == 'S' or
std_name_units_dimensionless or
standard_name is None)
# 1) Units must exist
valid_units = TestCtx(BaseCheck.HIGH, self.section_titles['3.1'])
valid_units.assert_true(should_be_dimensionless or units is not None,
'units attribute is required for {} when variable is not a dimensionless quantity'.format(variable_name))
# Don't bother checking the rest
if units is None and not should_be_dimensionless:
return valid_units.to_result()
# 2) units attribute must be a string
valid_units.assert_true(should_be_dimensionless or isinstance(units, basestring),
'units attribute for {} needs to be a string'.format(variable_name))
# 3) units are not deprecated
valid_units.assert_true(units not in deprecated,
'units for {}, "{}" are deprecated by CF 1.6'.format(variable_name, units))
return valid_units.to_result() | def function[_check_valid_cf_units, parameter[self, ds, variable_name]]:
constant[
Checks that the variable contains units attribute, the attribute is a
string and the value is not deprecated by CF
:param netCDF4.Dataset ds: An open netCDF dataset
:param str variable_name: Name of the variable to be checked
:rtype:
:return: List of results
]
variable[deprecated] assign[=] list[[<ast.Constant object at 0x7da1b0b30280>, <ast.Constant object at 0x7da1b0b304c0>, <ast.Constant object at 0x7da1b0b30490>]]
variable[variable] assign[=] call[name[ds].variables][name[variable_name]]
variable[units] assign[=] call[name[getattr], parameter[name[variable], constant[units], constant[None]]]
variable[standard_name_full] assign[=] call[name[getattr], parameter[name[variable], constant[standard_name], constant[None]]]
<ast.Tuple object at 0x7da1b0b30820> assign[=] call[name[self]._split_standard_name, parameter[name[standard_name_full]]]
variable[std_name_units_dimensionless] assign[=] call[name[cfutil].is_dimensionless_standard_name, parameter[name[self]._std_names._root, name[standard_name]]]
variable[should_be_dimensionless] assign[=] <ast.BoolOp object at 0x7da1b0b30160>
variable[valid_units] assign[=] call[name[TestCtx], parameter[name[BaseCheck].HIGH, call[name[self].section_titles][constant[3.1]]]]
call[name[valid_units].assert_true, parameter[<ast.BoolOp object at 0x7da18f00d1b0>, call[constant[units attribute is required for {} when variable is not a dimensionless quantity].format, parameter[name[variable_name]]]]]
if <ast.BoolOp object at 0x7da18f00c700> begin[:]
return[call[name[valid_units].to_result, parameter[]]]
call[name[valid_units].assert_true, parameter[<ast.BoolOp object at 0x7da18f00f9a0>, call[constant[units attribute for {} needs to be a string].format, parameter[name[variable_name]]]]]
call[name[valid_units].assert_true, parameter[compare[name[units] <ast.NotIn object at 0x7da2590d7190> name[deprecated]], call[constant[units for {}, "{}" are deprecated by CF 1.6].format, parameter[name[variable_name], name[units]]]]]
return[call[name[valid_units].to_result, parameter[]]] | keyword[def] identifier[_check_valid_cf_units] ( identifier[self] , identifier[ds] , identifier[variable_name] ):
literal[string]
identifier[deprecated] =[ literal[string] , literal[string] , literal[string] ]
identifier[variable] = identifier[ds] . identifier[variables] [ identifier[variable_name] ]
identifier[units] = identifier[getattr] ( identifier[variable] , literal[string] , keyword[None] )
identifier[standard_name_full] = identifier[getattr] ( identifier[variable] , literal[string] , keyword[None] )
identifier[standard_name] , identifier[standard_name_modifier] = identifier[self] . identifier[_split_standard_name] ( identifier[standard_name_full] )
identifier[std_name_units_dimensionless] = identifier[cfutil] . identifier[is_dimensionless_standard_name] ( identifier[self] . identifier[_std_names] . identifier[_root] ,
identifier[standard_name] )
identifier[should_be_dimensionless] =( identifier[variable] . identifier[dtype] . identifier[char] == literal[string] keyword[or]
identifier[std_name_units_dimensionless] keyword[or]
identifier[standard_name] keyword[is] keyword[None] )
identifier[valid_units] = identifier[TestCtx] ( identifier[BaseCheck] . identifier[HIGH] , identifier[self] . identifier[section_titles] [ literal[string] ])
identifier[valid_units] . identifier[assert_true] ( identifier[should_be_dimensionless] keyword[or] identifier[units] keyword[is] keyword[not] keyword[None] ,
literal[string] . identifier[format] ( identifier[variable_name] ))
keyword[if] identifier[units] keyword[is] keyword[None] keyword[and] keyword[not] identifier[should_be_dimensionless] :
keyword[return] identifier[valid_units] . identifier[to_result] ()
identifier[valid_units] . identifier[assert_true] ( identifier[should_be_dimensionless] keyword[or] identifier[isinstance] ( identifier[units] , identifier[basestring] ),
literal[string] . identifier[format] ( identifier[variable_name] ))
identifier[valid_units] . identifier[assert_true] ( identifier[units] keyword[not] keyword[in] identifier[deprecated] ,
literal[string] . identifier[format] ( identifier[variable_name] , identifier[units] ))
keyword[return] identifier[valid_units] . identifier[to_result] () | def _check_valid_cf_units(self, ds, variable_name):
"""
Checks that the variable contains units attribute, the attribute is a
string and the value is not deprecated by CF
:param netCDF4.Dataset ds: An open netCDF dataset
:param str variable_name: Name of the variable to be checked
:rtype:
:return: List of results
"""
# This list is straight from section 3
deprecated = ['level', 'layer', 'sigma_level']
variable = ds.variables[variable_name]
units = getattr(variable, 'units', None)
standard_name_full = getattr(variable, 'standard_name', None)
(standard_name, standard_name_modifier) = self._split_standard_name(standard_name_full)
std_name_units_dimensionless = cfutil.is_dimensionless_standard_name(self._std_names._root, standard_name)
# Is this even in the database? also, if there is no standard_name,
# there's no way to know if it is dimensionless.
should_be_dimensionless = variable.dtype.char == 'S' or std_name_units_dimensionless or standard_name is None
# 1) Units must exist
valid_units = TestCtx(BaseCheck.HIGH, self.section_titles['3.1'])
valid_units.assert_true(should_be_dimensionless or units is not None, 'units attribute is required for {} when variable is not a dimensionless quantity'.format(variable_name))
# Don't bother checking the rest
if units is None and (not should_be_dimensionless):
return valid_units.to_result() # depends on [control=['if'], data=[]]
# 2) units attribute must be a string
valid_units.assert_true(should_be_dimensionless or isinstance(units, basestring), 'units attribute for {} needs to be a string'.format(variable_name))
# 3) units are not deprecated
valid_units.assert_true(units not in deprecated, 'units for {}, "{}" are deprecated by CF 1.6'.format(variable_name, units))
return valid_units.to_result() |
def format_dap_from_dapi(name, version='', full=False):
'''Formats information about given DAP from DAPI in a human readable form to list of lines'''
lines = []
m, d = _get_metadap_dap(name, version)
if d:
# Determining label width
labels = BASIC_LABELS + ['average_rank'] # average_rank comes from m, not d
if full:
labels.extend(EXTRA_LABELS)
label_width = dapi.DapFormatter.calculate_offset(labels)
# Metadata
lines += dapi.DapFormatter.format_meta_lines(d, labels=labels, offset=label_width)
lines.append(dapi.DapFormatter.format_dapi_score(m, offset=label_width))
if 'assistants' in d:
# Assistants
assistants = sorted([a for a in d['assistants'] if a.startswith('assistants')])
lines.append('')
for line in dapi.DapFormatter.format_assistants_lines(assistants):
lines.append(line)
# Snippets
if full:
snippets = sorted([a for a in d['assistants'] if a.startswith('snippets')])
lines.append('')
lines += dapi.DapFormatter.format_snippets(snippets)
# Supported platforms
if d.get('supported_platforms', ''):
lines.append('')
lines += dapi.DapFormatter.format_platforms(d['supported_platforms'])
lines.append('')
return lines | def function[format_dap_from_dapi, parameter[name, version, full]]:
constant[Formats information about given DAP from DAPI in a human readable form to list of lines]
variable[lines] assign[=] list[[]]
<ast.Tuple object at 0x7da1b0f0f4f0> assign[=] call[name[_get_metadap_dap], parameter[name[name], name[version]]]
if name[d] begin[:]
variable[labels] assign[=] binary_operation[name[BASIC_LABELS] + list[[<ast.Constant object at 0x7da1b0f0cfa0>]]]
if name[full] begin[:]
call[name[labels].extend, parameter[name[EXTRA_LABELS]]]
variable[label_width] assign[=] call[name[dapi].DapFormatter.calculate_offset, parameter[name[labels]]]
<ast.AugAssign object at 0x7da1b0f0fdf0>
call[name[lines].append, parameter[call[name[dapi].DapFormatter.format_dapi_score, parameter[name[m]]]]]
if compare[constant[assistants] in name[d]] begin[:]
variable[assistants] assign[=] call[name[sorted], parameter[<ast.ListComp object at 0x7da1b0f0d8d0>]]
call[name[lines].append, parameter[constant[]]]
for taget[name[line]] in starred[call[name[dapi].DapFormatter.format_assistants_lines, parameter[name[assistants]]]] begin[:]
call[name[lines].append, parameter[name[line]]]
if name[full] begin[:]
variable[snippets] assign[=] call[name[sorted], parameter[<ast.ListComp object at 0x7da1b0f0d9c0>]]
call[name[lines].append, parameter[constant[]]]
<ast.AugAssign object at 0x7da1b0f0c310>
if call[name[d].get, parameter[constant[supported_platforms], constant[]]] begin[:]
call[name[lines].append, parameter[constant[]]]
<ast.AugAssign object at 0x7da1b0f0d6f0>
call[name[lines].append, parameter[constant[]]]
return[name[lines]] | keyword[def] identifier[format_dap_from_dapi] ( identifier[name] , identifier[version] = literal[string] , identifier[full] = keyword[False] ):
literal[string]
identifier[lines] =[]
identifier[m] , identifier[d] = identifier[_get_metadap_dap] ( identifier[name] , identifier[version] )
keyword[if] identifier[d] :
identifier[labels] = identifier[BASIC_LABELS] +[ literal[string] ]
keyword[if] identifier[full] :
identifier[labels] . identifier[extend] ( identifier[EXTRA_LABELS] )
identifier[label_width] = identifier[dapi] . identifier[DapFormatter] . identifier[calculate_offset] ( identifier[labels] )
identifier[lines] += identifier[dapi] . identifier[DapFormatter] . identifier[format_meta_lines] ( identifier[d] , identifier[labels] = identifier[labels] , identifier[offset] = identifier[label_width] )
identifier[lines] . identifier[append] ( identifier[dapi] . identifier[DapFormatter] . identifier[format_dapi_score] ( identifier[m] , identifier[offset] = identifier[label_width] ))
keyword[if] literal[string] keyword[in] identifier[d] :
identifier[assistants] = identifier[sorted] ([ identifier[a] keyword[for] identifier[a] keyword[in] identifier[d] [ literal[string] ] keyword[if] identifier[a] . identifier[startswith] ( literal[string] )])
identifier[lines] . identifier[append] ( literal[string] )
keyword[for] identifier[line] keyword[in] identifier[dapi] . identifier[DapFormatter] . identifier[format_assistants_lines] ( identifier[assistants] ):
identifier[lines] . identifier[append] ( identifier[line] )
keyword[if] identifier[full] :
identifier[snippets] = identifier[sorted] ([ identifier[a] keyword[for] identifier[a] keyword[in] identifier[d] [ literal[string] ] keyword[if] identifier[a] . identifier[startswith] ( literal[string] )])
identifier[lines] . identifier[append] ( literal[string] )
identifier[lines] += identifier[dapi] . identifier[DapFormatter] . identifier[format_snippets] ( identifier[snippets] )
keyword[if] identifier[d] . identifier[get] ( literal[string] , literal[string] ):
identifier[lines] . identifier[append] ( literal[string] )
identifier[lines] += identifier[dapi] . identifier[DapFormatter] . identifier[format_platforms] ( identifier[d] [ literal[string] ])
identifier[lines] . identifier[append] ( literal[string] )
keyword[return] identifier[lines] | def format_dap_from_dapi(name, version='', full=False):
"""Formats information about given DAP from DAPI in a human readable form to list of lines"""
lines = []
(m, d) = _get_metadap_dap(name, version)
if d:
# Determining label width
labels = BASIC_LABELS + ['average_rank'] # average_rank comes from m, not d
if full:
labels.extend(EXTRA_LABELS) # depends on [control=['if'], data=[]]
label_width = dapi.DapFormatter.calculate_offset(labels)
# Metadata
lines += dapi.DapFormatter.format_meta_lines(d, labels=labels, offset=label_width)
lines.append(dapi.DapFormatter.format_dapi_score(m, offset=label_width))
if 'assistants' in d:
# Assistants
assistants = sorted([a for a in d['assistants'] if a.startswith('assistants')])
lines.append('')
for line in dapi.DapFormatter.format_assistants_lines(assistants):
lines.append(line) # depends on [control=['for'], data=['line']]
# Snippets
if full:
snippets = sorted([a for a in d['assistants'] if a.startswith('snippets')])
lines.append('')
lines += dapi.DapFormatter.format_snippets(snippets) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['d']]
# Supported platforms
if d.get('supported_platforms', ''):
lines.append('')
lines += dapi.DapFormatter.format_platforms(d['supported_platforms']) # depends on [control=['if'], data=[]]
lines.append('') # depends on [control=['if'], data=[]]
return lines |
def handle_label_relation(self, line: str, position: int, tokens: ParseResults) -> ParseResults:
"""Handle statements like ``p(X) label "Label for X"``.
:raises: RelabelWarning
"""
subject_node_dsl = self.ensure_node(tokens[SUBJECT])
description = tokens[OBJECT]
if self.graph.has_node_description(subject_node_dsl):
raise RelabelWarning(
line_number=self.get_line_number(),
line=line,
position=position,
node=self.graph.node,
old_label=self.graph.get_node_description(subject_node_dsl),
new_label=description
)
self.graph.set_node_description(subject_node_dsl, description)
return tokens | def function[handle_label_relation, parameter[self, line, position, tokens]]:
constant[Handle statements like ``p(X) label "Label for X"``.
:raises: RelabelWarning
]
variable[subject_node_dsl] assign[=] call[name[self].ensure_node, parameter[call[name[tokens]][name[SUBJECT]]]]
variable[description] assign[=] call[name[tokens]][name[OBJECT]]
if call[name[self].graph.has_node_description, parameter[name[subject_node_dsl]]] begin[:]
<ast.Raise object at 0x7da1b26ad810>
call[name[self].graph.set_node_description, parameter[name[subject_node_dsl], name[description]]]
return[name[tokens]] | keyword[def] identifier[handle_label_relation] ( identifier[self] , identifier[line] : identifier[str] , identifier[position] : identifier[int] , identifier[tokens] : identifier[ParseResults] )-> identifier[ParseResults] :
literal[string]
identifier[subject_node_dsl] = identifier[self] . identifier[ensure_node] ( identifier[tokens] [ identifier[SUBJECT] ])
identifier[description] = identifier[tokens] [ identifier[OBJECT] ]
keyword[if] identifier[self] . identifier[graph] . identifier[has_node_description] ( identifier[subject_node_dsl] ):
keyword[raise] identifier[RelabelWarning] (
identifier[line_number] = identifier[self] . identifier[get_line_number] (),
identifier[line] = identifier[line] ,
identifier[position] = identifier[position] ,
identifier[node] = identifier[self] . identifier[graph] . identifier[node] ,
identifier[old_label] = identifier[self] . identifier[graph] . identifier[get_node_description] ( identifier[subject_node_dsl] ),
identifier[new_label] = identifier[description]
)
identifier[self] . identifier[graph] . identifier[set_node_description] ( identifier[subject_node_dsl] , identifier[description] )
keyword[return] identifier[tokens] | def handle_label_relation(self, line: str, position: int, tokens: ParseResults) -> ParseResults:
"""Handle statements like ``p(X) label "Label for X"``.
:raises: RelabelWarning
"""
subject_node_dsl = self.ensure_node(tokens[SUBJECT])
description = tokens[OBJECT]
if self.graph.has_node_description(subject_node_dsl):
raise RelabelWarning(line_number=self.get_line_number(), line=line, position=position, node=self.graph.node, old_label=self.graph.get_node_description(subject_node_dsl), new_label=description) # depends on [control=['if'], data=[]]
self.graph.set_node_description(subject_node_dsl, description)
return tokens |
def _init(sdp_state: SDPState):
"""Initialise the Master Controller Service.
Performs the following actions:
1. Registers ServiceState objects into the Config Db.
2. If initialising for the first time (unknown state),
sets the SDPState to 'init'
3. Initialises the state of Services, if running for the first time
(their state == unknown)
4. Waits some time and sets the Service states to 'on'. This emulates
waiting for Services to become available.
5. Once all services are 'on', sets the SDP state to 'standby'.
"""
# Parse command line arguments.
LOG.info("Initialising: %s", __service_id__)
# FIXME(BMo) There is a bug when SDP or services 'start' in the 'off'
# state. At the moment it is impossible to transition out of this.
# FIXME(BMo) **Hack** Register all services or if already registered do
# nothing (this is handled by the ServiceState object).
_services = [
"ExecutionControl:AlarmReceiver:1.0.0",
"ExecutionControl:AlertManager:1.0.0",
"ExecutionControl:ConfigurationDatabase:5.0.1",
"ExecutionControl:MasterController:1.3.0",
"ExecutionControl:ProcessingController:1.2.6",
"ExecutionControl:ProcessingBlockController:1.3.0",
"TangoControl:Database:1.0.4",
"TangoControl:MySQL:1.0.3",
"TangoControl:SDPMaster:1.2.1",
"TangoControl:Subarrays:1.2.0",
"TangoControl:ProcessingBlocks:1.2.0",
"Platform:Kafka:2.1.1",
"Platform:Prometheus:1.0.0",
"Platform:PrometheusPushGateway:0.7.0",
"Platform:RedisCommander:210.0.0",
"Platform:Zookeeper:3.4.13"
]
for service_id in _services:
subsystem, name, version = service_id.split(':')
ServiceState(subsystem, name, version)
# If the SDP state is 'unknown', mark the SDP state as init.
# FIXME(BMo) This is not right as we want to allow for recovery from
# failure without just reinitialising...!? ie. respect the old sate
# NOTE: If the state is 'off' we will want to reset the database
# with 'skasip_config_db_init --clear'
if sdp_state.current_state in ['unknown', 'off']:
try:
LOG.info("Setting the SDPState to 'init'")
sdp_state.update_current_state('init', force=True)
except ValueError as error:
LOG.critical('Unable to set the State of SDP to init! %s',
str(error))
LOG.info("Updating Service States")
service_state_list = get_service_state_list()
# FIXME(BMo) **Hack** Mark all Services in the 'unknown' state as
# initialising.
for service_state in service_state_list:
if service_state.current_state in ['unknown', 'off']:
service_state.update_current_state('init', force=True)
# FIXME(BMo) **Hack** After 'checking' that the services are 'on' set
# their state on 'on' after a short delay.
# FIXME(BMo) This check should not be serialised!!! (should be part of the
# event loop)
for service_state in service_state_list:
if service_state.current_state == 'init':
time.sleep(random.uniform(0, 0.2))
service_state.update_current_state('on')
# FIXME(BMo): **Hack** Now the all services are on, set the sate of SDP to
# 'standby'
# FIXME(BMo) This should also be part of the event loop.
services_on = [service.current_state == 'on'
for service in service_state_list]
if all(services_on):
LOG.info('All Services are online!.')
sdp_state.update_current_state('standby')
else:
LOG.critical('Master Controller failed to initialise.')
return service_state_list | def function[_init, parameter[sdp_state]]:
constant[Initialise the Master Controller Service.
Performs the following actions:
1. Registers ServiceState objects into the Config Db.
2. If initialising for the first time (unknown state),
sets the SDPState to 'init'
3. Initialises the state of Services, if running for the first time
(their state == unknown)
4. Waits some time and sets the Service states to 'on'. This emulates
waiting for Services to become available.
5. Once all services are 'on', sets the SDP state to 'standby'.
]
call[name[LOG].info, parameter[constant[Initialising: %s], name[__service_id__]]]
variable[_services] assign[=] list[[<ast.Constant object at 0x7da18f00dff0>, <ast.Constant object at 0x7da18f00efe0>, <ast.Constant object at 0x7da18f00cd00>, <ast.Constant object at 0x7da18f00d600>, <ast.Constant object at 0x7da18f00c8e0>, <ast.Constant object at 0x7da18f00ec50>, <ast.Constant object at 0x7da18f00ebc0>, <ast.Constant object at 0x7da18f00e920>, <ast.Constant object at 0x7da18f00df90>, <ast.Constant object at 0x7da18f00f5e0>, <ast.Constant object at 0x7da18f00f8b0>, <ast.Constant object at 0x7da18f00d660>, <ast.Constant object at 0x7da18f00e8c0>, <ast.Constant object at 0x7da18f00f3d0>, <ast.Constant object at 0x7da18f00e7d0>, <ast.Constant object at 0x7da18f00d0c0>]]
for taget[name[service_id]] in starred[name[_services]] begin[:]
<ast.Tuple object at 0x7da18f00e860> assign[=] call[name[service_id].split, parameter[constant[:]]]
call[name[ServiceState], parameter[name[subsystem], name[name], name[version]]]
if compare[name[sdp_state].current_state in list[[<ast.Constant object at 0x7da18f00e530>, <ast.Constant object at 0x7da18f00e350>]]] begin[:]
<ast.Try object at 0x7da18f00e950>
call[name[LOG].info, parameter[constant[Updating Service States]]]
variable[service_state_list] assign[=] call[name[get_service_state_list], parameter[]]
for taget[name[service_state]] in starred[name[service_state_list]] begin[:]
if compare[name[service_state].current_state in list[[<ast.Constant object at 0x7da18f00d870>, <ast.Constant object at 0x7da18f00f040>]]] begin[:]
call[name[service_state].update_current_state, parameter[constant[init]]]
for taget[name[service_state]] in starred[name[service_state_list]] begin[:]
if compare[name[service_state].current_state equal[==] constant[init]] begin[:]
call[name[time].sleep, parameter[call[name[random].uniform, parameter[constant[0], constant[0.2]]]]]
call[name[service_state].update_current_state, parameter[constant[on]]]
variable[services_on] assign[=] <ast.ListComp object at 0x7da18f00fdf0>
if call[name[all], parameter[name[services_on]]] begin[:]
call[name[LOG].info, parameter[constant[All Services are online!.]]]
call[name[sdp_state].update_current_state, parameter[constant[standby]]]
return[name[service_state_list]] | keyword[def] identifier[_init] ( identifier[sdp_state] : identifier[SDPState] ):
literal[string]
identifier[LOG] . identifier[info] ( literal[string] , identifier[__service_id__] )
identifier[_services] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string]
]
keyword[for] identifier[service_id] keyword[in] identifier[_services] :
identifier[subsystem] , identifier[name] , identifier[version] = identifier[service_id] . identifier[split] ( literal[string] )
identifier[ServiceState] ( identifier[subsystem] , identifier[name] , identifier[version] )
keyword[if] identifier[sdp_state] . identifier[current_state] keyword[in] [ literal[string] , literal[string] ]:
keyword[try] :
identifier[LOG] . identifier[info] ( literal[string] )
identifier[sdp_state] . identifier[update_current_state] ( literal[string] , identifier[force] = keyword[True] )
keyword[except] identifier[ValueError] keyword[as] identifier[error] :
identifier[LOG] . identifier[critical] ( literal[string] ,
identifier[str] ( identifier[error] ))
identifier[LOG] . identifier[info] ( literal[string] )
identifier[service_state_list] = identifier[get_service_state_list] ()
keyword[for] identifier[service_state] keyword[in] identifier[service_state_list] :
keyword[if] identifier[service_state] . identifier[current_state] keyword[in] [ literal[string] , literal[string] ]:
identifier[service_state] . identifier[update_current_state] ( literal[string] , identifier[force] = keyword[True] )
keyword[for] identifier[service_state] keyword[in] identifier[service_state_list] :
keyword[if] identifier[service_state] . identifier[current_state] == literal[string] :
identifier[time] . identifier[sleep] ( identifier[random] . identifier[uniform] ( literal[int] , literal[int] ))
identifier[service_state] . identifier[update_current_state] ( literal[string] )
identifier[services_on] =[ identifier[service] . identifier[current_state] == literal[string]
keyword[for] identifier[service] keyword[in] identifier[service_state_list] ]
keyword[if] identifier[all] ( identifier[services_on] ):
identifier[LOG] . identifier[info] ( literal[string] )
identifier[sdp_state] . identifier[update_current_state] ( literal[string] )
keyword[else] :
identifier[LOG] . identifier[critical] ( literal[string] )
keyword[return] identifier[service_state_list] | def _init(sdp_state: SDPState):
"""Initialise the Master Controller Service.
Performs the following actions:
1. Registers ServiceState objects into the Config Db.
2. If initialising for the first time (unknown state),
sets the SDPState to 'init'
3. Initialises the state of Services, if running for the first time
(their state == unknown)
4. Waits some time and sets the Service states to 'on'. This emulates
waiting for Services to become available.
5. Once all services are 'on', sets the SDP state to 'standby'.
"""
# Parse command line arguments.
LOG.info('Initialising: %s', __service_id__)
# FIXME(BMo) There is a bug when SDP or services 'start' in the 'off'
# state. At the moment it is impossible to transition out of this.
# FIXME(BMo) **Hack** Register all services or if already registered do
# nothing (this is handled by the ServiceState object).
_services = ['ExecutionControl:AlarmReceiver:1.0.0', 'ExecutionControl:AlertManager:1.0.0', 'ExecutionControl:ConfigurationDatabase:5.0.1', 'ExecutionControl:MasterController:1.3.0', 'ExecutionControl:ProcessingController:1.2.6', 'ExecutionControl:ProcessingBlockController:1.3.0', 'TangoControl:Database:1.0.4', 'TangoControl:MySQL:1.0.3', 'TangoControl:SDPMaster:1.2.1', 'TangoControl:Subarrays:1.2.0', 'TangoControl:ProcessingBlocks:1.2.0', 'Platform:Kafka:2.1.1', 'Platform:Prometheus:1.0.0', 'Platform:PrometheusPushGateway:0.7.0', 'Platform:RedisCommander:210.0.0', 'Platform:Zookeeper:3.4.13']
for service_id in _services:
(subsystem, name, version) = service_id.split(':')
ServiceState(subsystem, name, version) # depends on [control=['for'], data=['service_id']]
# If the SDP state is 'unknown', mark the SDP state as init.
# FIXME(BMo) This is not right as we want to allow for recovery from
# failure without just reinitialising...!? ie. respect the old sate
# NOTE: If the state is 'off' we will want to reset the database
# with 'skasip_config_db_init --clear'
if sdp_state.current_state in ['unknown', 'off']:
try:
LOG.info("Setting the SDPState to 'init'")
sdp_state.update_current_state('init', force=True) # depends on [control=['try'], data=[]]
except ValueError as error:
LOG.critical('Unable to set the State of SDP to init! %s', str(error)) # depends on [control=['except'], data=['error']] # depends on [control=['if'], data=[]]
LOG.info('Updating Service States')
service_state_list = get_service_state_list()
# FIXME(BMo) **Hack** Mark all Services in the 'unknown' state as
# initialising.
for service_state in service_state_list:
if service_state.current_state in ['unknown', 'off']:
service_state.update_current_state('init', force=True) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['service_state']]
# FIXME(BMo) **Hack** After 'checking' that the services are 'on' set
# their state on 'on' after a short delay.
# FIXME(BMo) This check should not be serialised!!! (should be part of the
# event loop)
for service_state in service_state_list:
if service_state.current_state == 'init':
time.sleep(random.uniform(0, 0.2))
service_state.update_current_state('on') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['service_state']]
# FIXME(BMo): **Hack** Now the all services are on, set the sate of SDP to
# 'standby'
# FIXME(BMo) This should also be part of the event loop.
services_on = [service.current_state == 'on' for service in service_state_list]
if all(services_on):
LOG.info('All Services are online!.')
sdp_state.update_current_state('standby') # depends on [control=['if'], data=[]]
else:
LOG.critical('Master Controller failed to initialise.')
return service_state_list |
def _name_value_to_bson(name, value, check_keys, opts,
in_custom_call=False,
in_fallback_call=False):
"""Encode a single name, value pair."""
# First see if the type is already cached. KeyError will only ever
# happen once per subtype.
try:
return _ENCODERS[type(value)](name, value, check_keys, opts)
except KeyError:
pass
# Second, fall back to trying _type_marker. This has to be done
# before the loop below since users could subclass one of our
# custom types that subclasses a python built-in (e.g. Binary)
marker = getattr(value, "_type_marker", None)
if isinstance(marker, int) and marker in _MARKERS:
func = _MARKERS[marker]
# Cache this type for faster subsequent lookup.
_ENCODERS[type(value)] = func
return func(name, value, check_keys, opts)
# Third, check if a type encoder is registered for this type.
# Note that subtypes of registered custom types are not auto-encoded.
if not in_custom_call and opts.type_registry._encoder_map:
custom_encoder = opts.type_registry._encoder_map.get(type(value))
if custom_encoder is not None:
return _name_value_to_bson(
name, custom_encoder(value), check_keys, opts,
in_custom_call=True)
# Fourth, test each base type. This will only happen once for
# a subtype of a supported base type. Unlike in the C-extensions, this
# is done after trying the custom type encoder because checking for each
# subtype is expensive.
for base in _BUILT_IN_TYPES:
if isinstance(value, base):
func = _ENCODERS[base]
# Cache this type for faster subsequent lookup.
_ENCODERS[type(value)] = func
return func(name, value, check_keys, opts)
# As a last resort, try using the fallback encoder, if the user has
# provided one.
fallback_encoder = opts.type_registry._fallback_encoder
if not in_fallback_call and fallback_encoder is not None:
return _name_value_to_bson(
name, fallback_encoder(value), check_keys, opts,
in_fallback_call=True)
raise InvalidDocument(
"cannot encode object: %r, of type: %r" % (value, type(value))) | def function[_name_value_to_bson, parameter[name, value, check_keys, opts, in_custom_call, in_fallback_call]]:
constant[Encode a single name, value pair.]
<ast.Try object at 0x7da20e9608b0>
variable[marker] assign[=] call[name[getattr], parameter[name[value], constant[_type_marker], constant[None]]]
if <ast.BoolOp object at 0x7da20e960a00> begin[:]
variable[func] assign[=] call[name[_MARKERS]][name[marker]]
call[name[_ENCODERS]][call[name[type], parameter[name[value]]]] assign[=] name[func]
return[call[name[func], parameter[name[name], name[value], name[check_keys], name[opts]]]]
if <ast.BoolOp object at 0x7da20e962f80> begin[:]
variable[custom_encoder] assign[=] call[name[opts].type_registry._encoder_map.get, parameter[call[name[type], parameter[name[value]]]]]
if compare[name[custom_encoder] is_not constant[None]] begin[:]
return[call[name[_name_value_to_bson], parameter[name[name], call[name[custom_encoder], parameter[name[value]]], name[check_keys], name[opts]]]]
for taget[name[base]] in starred[name[_BUILT_IN_TYPES]] begin[:]
if call[name[isinstance], parameter[name[value], name[base]]] begin[:]
variable[func] assign[=] call[name[_ENCODERS]][name[base]]
call[name[_ENCODERS]][call[name[type], parameter[name[value]]]] assign[=] name[func]
return[call[name[func], parameter[name[name], name[value], name[check_keys], name[opts]]]]
variable[fallback_encoder] assign[=] name[opts].type_registry._fallback_encoder
if <ast.BoolOp object at 0x7da2041db6d0> begin[:]
return[call[name[_name_value_to_bson], parameter[name[name], call[name[fallback_encoder], parameter[name[value]]], name[check_keys], name[opts]]]]
<ast.Raise object at 0x7da2041db3a0> | keyword[def] identifier[_name_value_to_bson] ( identifier[name] , identifier[value] , identifier[check_keys] , identifier[opts] ,
identifier[in_custom_call] = keyword[False] ,
identifier[in_fallback_call] = keyword[False] ):
literal[string]
keyword[try] :
keyword[return] identifier[_ENCODERS] [ identifier[type] ( identifier[value] )]( identifier[name] , identifier[value] , identifier[check_keys] , identifier[opts] )
keyword[except] identifier[KeyError] :
keyword[pass]
identifier[marker] = identifier[getattr] ( identifier[value] , literal[string] , keyword[None] )
keyword[if] identifier[isinstance] ( identifier[marker] , identifier[int] ) keyword[and] identifier[marker] keyword[in] identifier[_MARKERS] :
identifier[func] = identifier[_MARKERS] [ identifier[marker] ]
identifier[_ENCODERS] [ identifier[type] ( identifier[value] )]= identifier[func]
keyword[return] identifier[func] ( identifier[name] , identifier[value] , identifier[check_keys] , identifier[opts] )
keyword[if] keyword[not] identifier[in_custom_call] keyword[and] identifier[opts] . identifier[type_registry] . identifier[_encoder_map] :
identifier[custom_encoder] = identifier[opts] . identifier[type_registry] . identifier[_encoder_map] . identifier[get] ( identifier[type] ( identifier[value] ))
keyword[if] identifier[custom_encoder] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[_name_value_to_bson] (
identifier[name] , identifier[custom_encoder] ( identifier[value] ), identifier[check_keys] , identifier[opts] ,
identifier[in_custom_call] = keyword[True] )
keyword[for] identifier[base] keyword[in] identifier[_BUILT_IN_TYPES] :
keyword[if] identifier[isinstance] ( identifier[value] , identifier[base] ):
identifier[func] = identifier[_ENCODERS] [ identifier[base] ]
identifier[_ENCODERS] [ identifier[type] ( identifier[value] )]= identifier[func]
keyword[return] identifier[func] ( identifier[name] , identifier[value] , identifier[check_keys] , identifier[opts] )
identifier[fallback_encoder] = identifier[opts] . identifier[type_registry] . identifier[_fallback_encoder]
keyword[if] keyword[not] identifier[in_fallback_call] keyword[and] identifier[fallback_encoder] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[_name_value_to_bson] (
identifier[name] , identifier[fallback_encoder] ( identifier[value] ), identifier[check_keys] , identifier[opts] ,
identifier[in_fallback_call] = keyword[True] )
keyword[raise] identifier[InvalidDocument] (
literal[string] %( identifier[value] , identifier[type] ( identifier[value] ))) | def _name_value_to_bson(name, value, check_keys, opts, in_custom_call=False, in_fallback_call=False):
"""Encode a single name, value pair."""
# First see if the type is already cached. KeyError will only ever
# happen once per subtype.
try:
return _ENCODERS[type(value)](name, value, check_keys, opts) # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
# Second, fall back to trying _type_marker. This has to be done
# before the loop below since users could subclass one of our
# custom types that subclasses a python built-in (e.g. Binary)
marker = getattr(value, '_type_marker', None)
if isinstance(marker, int) and marker in _MARKERS:
func = _MARKERS[marker]
# Cache this type for faster subsequent lookup.
_ENCODERS[type(value)] = func
return func(name, value, check_keys, opts) # depends on [control=['if'], data=[]]
# Third, check if a type encoder is registered for this type.
# Note that subtypes of registered custom types are not auto-encoded.
if not in_custom_call and opts.type_registry._encoder_map:
custom_encoder = opts.type_registry._encoder_map.get(type(value))
if custom_encoder is not None:
return _name_value_to_bson(name, custom_encoder(value), check_keys, opts, in_custom_call=True) # depends on [control=['if'], data=['custom_encoder']] # depends on [control=['if'], data=[]]
# Fourth, test each base type. This will only happen once for
# a subtype of a supported base type. Unlike in the C-extensions, this
# is done after trying the custom type encoder because checking for each
# subtype is expensive.
for base in _BUILT_IN_TYPES:
if isinstance(value, base):
func = _ENCODERS[base]
# Cache this type for faster subsequent lookup.
_ENCODERS[type(value)] = func
return func(name, value, check_keys, opts) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['base']]
# As a last resort, try using the fallback encoder, if the user has
# provided one.
fallback_encoder = opts.type_registry._fallback_encoder
if not in_fallback_call and fallback_encoder is not None:
return _name_value_to_bson(name, fallback_encoder(value), check_keys, opts, in_fallback_call=True) # depends on [control=['if'], data=[]]
raise InvalidDocument('cannot encode object: %r, of type: %r' % (value, type(value))) |
async def _report_version(self):
"""
This is a private message handler method.
This method reads the following 2 bytes after the report version
command (0xF9 - non sysex).
The first byte is the major number and the second byte is the
minor number.
:returns: None
"""
# get next two bytes
major = await self.read()
version_string = str(major)
minor = await self.read()
version_string += '.'
version_string += str(minor)
self.query_reply_data[PrivateConstants.REPORT_VERSION] = version_string | <ast.AsyncFunctionDef object at 0x7da207f026e0> | keyword[async] keyword[def] identifier[_report_version] ( identifier[self] ):
literal[string]
identifier[major] = keyword[await] identifier[self] . identifier[read] ()
identifier[version_string] = identifier[str] ( identifier[major] )
identifier[minor] = keyword[await] identifier[self] . identifier[read] ()
identifier[version_string] += literal[string]
identifier[version_string] += identifier[str] ( identifier[minor] )
identifier[self] . identifier[query_reply_data] [ identifier[PrivateConstants] . identifier[REPORT_VERSION] ]= identifier[version_string] | async def _report_version(self):
"""
This is a private message handler method.
This method reads the following 2 bytes after the report version
command (0xF9 - non sysex).
The first byte is the major number and the second byte is the
minor number.
:returns: None
"""
# get next two bytes
major = await self.read()
version_string = str(major)
minor = await self.read()
version_string += '.'
version_string += str(minor)
self.query_reply_data[PrivateConstants.REPORT_VERSION] = version_string |
def sim_model_backwards(self,tmax,X0):
""" Simulate the model backwards in time.
"""
X = np.zeros((tmax,self.dim))
X[tmax-1] = X0
for t in range(tmax-2,-1,-1):
sol = sp.optimize.root(self.sim_model_back_help,
X[t+1],
args=(X[t+1]),method='hybr')
X[t] = sol.x
return X | def function[sim_model_backwards, parameter[self, tmax, X0]]:
constant[ Simulate the model backwards in time.
]
variable[X] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b2344340>, <ast.Attribute object at 0x7da1b2344850>]]]]
call[name[X]][binary_operation[name[tmax] - constant[1]]] assign[=] name[X0]
for taget[name[t]] in starred[call[name[range], parameter[binary_operation[name[tmax] - constant[2]], <ast.UnaryOp object at 0x7da1b2347d90>, <ast.UnaryOp object at 0x7da1b2345330>]]] begin[:]
variable[sol] assign[=] call[name[sp].optimize.root, parameter[name[self].sim_model_back_help, call[name[X]][binary_operation[name[t] + constant[1]]]]]
call[name[X]][name[t]] assign[=] name[sol].x
return[name[X]] | keyword[def] identifier[sim_model_backwards] ( identifier[self] , identifier[tmax] , identifier[X0] ):
literal[string]
identifier[X] = identifier[np] . identifier[zeros] (( identifier[tmax] , identifier[self] . identifier[dim] ))
identifier[X] [ identifier[tmax] - literal[int] ]= identifier[X0]
keyword[for] identifier[t] keyword[in] identifier[range] ( identifier[tmax] - literal[int] ,- literal[int] ,- literal[int] ):
identifier[sol] = identifier[sp] . identifier[optimize] . identifier[root] ( identifier[self] . identifier[sim_model_back_help] ,
identifier[X] [ identifier[t] + literal[int] ],
identifier[args] =( identifier[X] [ identifier[t] + literal[int] ]), identifier[method] = literal[string] )
identifier[X] [ identifier[t] ]= identifier[sol] . identifier[x]
keyword[return] identifier[X] | def sim_model_backwards(self, tmax, X0):
""" Simulate the model backwards in time.
"""
X = np.zeros((tmax, self.dim))
X[tmax - 1] = X0
for t in range(tmax - 2, -1, -1):
sol = sp.optimize.root(self.sim_model_back_help, X[t + 1], args=X[t + 1], method='hybr')
X[t] = sol.x # depends on [control=['for'], data=['t']]
return X |
def index(request):
"""Handles a request based on method and calls the appropriate function"""
if request.method == 'GET':
return get(request)
elif request.method == 'POST':
return post(request)
return HttpResponse('') | def function[index, parameter[request]]:
constant[Handles a request based on method and calls the appropriate function]
if compare[name[request].method equal[==] constant[GET]] begin[:]
return[call[name[get], parameter[name[request]]]]
return[call[name[HttpResponse], parameter[constant[]]]] | keyword[def] identifier[index] ( identifier[request] ):
literal[string]
keyword[if] identifier[request] . identifier[method] == literal[string] :
keyword[return] identifier[get] ( identifier[request] )
keyword[elif] identifier[request] . identifier[method] == literal[string] :
keyword[return] identifier[post] ( identifier[request] )
keyword[return] identifier[HttpResponse] ( literal[string] ) | def index(request):
"""Handles a request based on method and calls the appropriate function"""
if request.method == 'GET':
return get(request) # depends on [control=['if'], data=[]]
elif request.method == 'POST':
return post(request) # depends on [control=['if'], data=[]]
return HttpResponse('') |
def forum_post_undelete(self, post_id):
"""Undelete a specific forum post (Requires login)(Moderator+)(UNTESTED).
Parameters:
post_id (int): Forum post id.
"""
return self._get('forum_posts/{0}/undelete.json'.format(post_id),
method='POST', auth=True) | def function[forum_post_undelete, parameter[self, post_id]]:
constant[Undelete a specific forum post (Requires login)(Moderator+)(UNTESTED).
Parameters:
post_id (int): Forum post id.
]
return[call[name[self]._get, parameter[call[constant[forum_posts/{0}/undelete.json].format, parameter[name[post_id]]]]]] | keyword[def] identifier[forum_post_undelete] ( identifier[self] , identifier[post_id] ):
literal[string]
keyword[return] identifier[self] . identifier[_get] ( literal[string] . identifier[format] ( identifier[post_id] ),
identifier[method] = literal[string] , identifier[auth] = keyword[True] ) | def forum_post_undelete(self, post_id):
"""Undelete a specific forum post (Requires login)(Moderator+)(UNTESTED).
Parameters:
post_id (int): Forum post id.
"""
return self._get('forum_posts/{0}/undelete.json'.format(post_id), method='POST', auth=True) |
def get_profile_pic_from_id(self, id):
"""
Get full profile pic from an id
The ID must be on your contact book to
successfully get their profile picture.
:param id: ID
:type id: str
"""
profile_pic = self.wapi_functions.getProfilePicFromId(id)
if profile_pic:
return b64decode(profile_pic)
else:
return False | def function[get_profile_pic_from_id, parameter[self, id]]:
constant[
Get full profile pic from an id
The ID must be on your contact book to
successfully get their profile picture.
:param id: ID
:type id: str
]
variable[profile_pic] assign[=] call[name[self].wapi_functions.getProfilePicFromId, parameter[name[id]]]
if name[profile_pic] begin[:]
return[call[name[b64decode], parameter[name[profile_pic]]]] | keyword[def] identifier[get_profile_pic_from_id] ( identifier[self] , identifier[id] ):
literal[string]
identifier[profile_pic] = identifier[self] . identifier[wapi_functions] . identifier[getProfilePicFromId] ( identifier[id] )
keyword[if] identifier[profile_pic] :
keyword[return] identifier[b64decode] ( identifier[profile_pic] )
keyword[else] :
keyword[return] keyword[False] | def get_profile_pic_from_id(self, id):
"""
Get full profile pic from an id
The ID must be on your contact book to
successfully get their profile picture.
:param id: ID
:type id: str
"""
profile_pic = self.wapi_functions.getProfilePicFromId(id)
if profile_pic:
return b64decode(profile_pic) # depends on [control=['if'], data=[]]
else:
return False |
def set_kmers(kmer_opt, max_read_len):
"""Returns a kmer list based on the provided kmer option and max read len.
Parameters
----------
kmer_opt : str
The k-mer option. Can be either ``'auto'``, ``'default'`` or a
sequence of space separated integers, ``'23, 45, 67'``.
max_read_len : int
The maximum read length of the current sample.
Returns
-------
kmers : list
List of k-mer values that will be provided to Spades.
"""
logger.debug("Kmer option set to: {}".format(kmer_opt))
# Check if kmer option is set to auto
if kmer_opt == "auto":
if max_read_len >= 175:
kmers = [55, 77, 99, 113, 127]
else:
kmers = [21, 33, 55, 67, 77]
logger.debug("Kmer range automatically selected based on max read"
"length of {}: {}".format(max_read_len, kmers))
# Check if manual kmers were specified
elif len(kmer_opt.split()) > 1:
kmers = kmer_opt.split()
logger.debug("Kmer range manually set to: {}".format(kmers))
else:
kmers = []
logger.debug("Kmer range set to empty (will be automatically "
"determined by SPAdes")
return kmers | def function[set_kmers, parameter[kmer_opt, max_read_len]]:
constant[Returns a kmer list based on the provided kmer option and max read len.
Parameters
----------
kmer_opt : str
The k-mer option. Can be either ``'auto'``, ``'default'`` or a
sequence of space separated integers, ``'23, 45, 67'``.
max_read_len : int
The maximum read length of the current sample.
Returns
-------
kmers : list
List of k-mer values that will be provided to Spades.
]
call[name[logger].debug, parameter[call[constant[Kmer option set to: {}].format, parameter[name[kmer_opt]]]]]
if compare[name[kmer_opt] equal[==] constant[auto]] begin[:]
if compare[name[max_read_len] greater_or_equal[>=] constant[175]] begin[:]
variable[kmers] assign[=] list[[<ast.Constant object at 0x7da1b03e04f0>, <ast.Constant object at 0x7da1b03e3520>, <ast.Constant object at 0x7da1b03e04c0>, <ast.Constant object at 0x7da1b03e3e20>, <ast.Constant object at 0x7da1b03e0a60>]]
call[name[logger].debug, parameter[call[constant[Kmer range automatically selected based on max readlength of {}: {}].format, parameter[name[max_read_len], name[kmers]]]]]
return[name[kmers]] | keyword[def] identifier[set_kmers] ( identifier[kmer_opt] , identifier[max_read_len] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[kmer_opt] ))
keyword[if] identifier[kmer_opt] == literal[string] :
keyword[if] identifier[max_read_len] >= literal[int] :
identifier[kmers] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]
keyword[else] :
identifier[kmers] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]
identifier[logger] . identifier[debug] ( literal[string]
literal[string] . identifier[format] ( identifier[max_read_len] , identifier[kmers] ))
keyword[elif] identifier[len] ( identifier[kmer_opt] . identifier[split] ())> literal[int] :
identifier[kmers] = identifier[kmer_opt] . identifier[split] ()
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[kmers] ))
keyword[else] :
identifier[kmers] =[]
identifier[logger] . identifier[debug] ( literal[string]
literal[string] )
keyword[return] identifier[kmers] | def set_kmers(kmer_opt, max_read_len):
"""Returns a kmer list based on the provided kmer option and max read len.
Parameters
----------
kmer_opt : str
The k-mer option. Can be either ``'auto'``, ``'default'`` or a
sequence of space separated integers, ``'23, 45, 67'``.
max_read_len : int
The maximum read length of the current sample.
Returns
-------
kmers : list
List of k-mer values that will be provided to Spades.
"""
logger.debug('Kmer option set to: {}'.format(kmer_opt))
# Check if kmer option is set to auto
if kmer_opt == 'auto':
if max_read_len >= 175:
kmers = [55, 77, 99, 113, 127] # depends on [control=['if'], data=[]]
else:
kmers = [21, 33, 55, 67, 77]
logger.debug('Kmer range automatically selected based on max readlength of {}: {}'.format(max_read_len, kmers)) # depends on [control=['if'], data=[]]
# Check if manual kmers were specified
elif len(kmer_opt.split()) > 1:
kmers = kmer_opt.split()
logger.debug('Kmer range manually set to: {}'.format(kmers)) # depends on [control=['if'], data=[]]
else:
kmers = []
logger.debug('Kmer range set to empty (will be automatically determined by SPAdes')
return kmers |
def in_use_connection_count(self, address):
""" Count the number of connections currently in use to a given
address.
"""
try:
connections = self.connections[address]
except KeyError:
return 0
else:
return sum(1 if connection.in_use else 0 for connection in connections) | def function[in_use_connection_count, parameter[self, address]]:
constant[ Count the number of connections currently in use to a given
address.
]
<ast.Try object at 0x7da18eb55930> | keyword[def] identifier[in_use_connection_count] ( identifier[self] , identifier[address] ):
literal[string]
keyword[try] :
identifier[connections] = identifier[self] . identifier[connections] [ identifier[address] ]
keyword[except] identifier[KeyError] :
keyword[return] literal[int]
keyword[else] :
keyword[return] identifier[sum] ( literal[int] keyword[if] identifier[connection] . identifier[in_use] keyword[else] literal[int] keyword[for] identifier[connection] keyword[in] identifier[connections] ) | def in_use_connection_count(self, address):
""" Count the number of connections currently in use to a given
address.
"""
try:
connections = self.connections[address] # depends on [control=['try'], data=[]]
except KeyError:
return 0 # depends on [control=['except'], data=[]]
else:
return sum((1 if connection.in_use else 0 for connection in connections)) |
def update_complex(tree_to_update, xpath_root, xpath_map, prop, values):
"""
Updates and returns the updated complex Element parsed from tree_to_update.
:param tree_to_update: the XML tree compatible with element_utils to be updated
:param xpath_root: the XPATH location of the root of the complex Element
:param xpath_map: a Dictionary of XPATHs corresponding to the complex structure definition
:param prop: the property identifying the complex structure to be serialized
:param values: a Dictionary representing the complex structure to be updated
"""
remove_element(tree_to_update, xpath_root, True)
values = reduce_value(values, {})
if not values:
# Returns the elements corresponding to property removed from the tree
updated = update_property(tree_to_update, xpath_root, xpath_root, prop, values)
else:
for subprop, value in iteritems(values):
xpath = xpath_map[subprop]
value = get_default_for_complex_sub(prop, subprop, value, xpath)
update_property(tree_to_update, None, xpath, subprop, value)
updated = get_element(tree_to_update, xpath_root)
return updated | def function[update_complex, parameter[tree_to_update, xpath_root, xpath_map, prop, values]]:
constant[
Updates and returns the updated complex Element parsed from tree_to_update.
:param tree_to_update: the XML tree compatible with element_utils to be updated
:param xpath_root: the XPATH location of the root of the complex Element
:param xpath_map: a Dictionary of XPATHs corresponding to the complex structure definition
:param prop: the property identifying the complex structure to be serialized
:param values: a Dictionary representing the complex structure to be updated
]
call[name[remove_element], parameter[name[tree_to_update], name[xpath_root], constant[True]]]
variable[values] assign[=] call[name[reduce_value], parameter[name[values], dictionary[[], []]]]
if <ast.UnaryOp object at 0x7da2047ea890> begin[:]
variable[updated] assign[=] call[name[update_property], parameter[name[tree_to_update], name[xpath_root], name[xpath_root], name[prop], name[values]]]
return[name[updated]] | keyword[def] identifier[update_complex] ( identifier[tree_to_update] , identifier[xpath_root] , identifier[xpath_map] , identifier[prop] , identifier[values] ):
literal[string]
identifier[remove_element] ( identifier[tree_to_update] , identifier[xpath_root] , keyword[True] )
identifier[values] = identifier[reduce_value] ( identifier[values] ,{})
keyword[if] keyword[not] identifier[values] :
identifier[updated] = identifier[update_property] ( identifier[tree_to_update] , identifier[xpath_root] , identifier[xpath_root] , identifier[prop] , identifier[values] )
keyword[else] :
keyword[for] identifier[subprop] , identifier[value] keyword[in] identifier[iteritems] ( identifier[values] ):
identifier[xpath] = identifier[xpath_map] [ identifier[subprop] ]
identifier[value] = identifier[get_default_for_complex_sub] ( identifier[prop] , identifier[subprop] , identifier[value] , identifier[xpath] )
identifier[update_property] ( identifier[tree_to_update] , keyword[None] , identifier[xpath] , identifier[subprop] , identifier[value] )
identifier[updated] = identifier[get_element] ( identifier[tree_to_update] , identifier[xpath_root] )
keyword[return] identifier[updated] | def update_complex(tree_to_update, xpath_root, xpath_map, prop, values):
"""
Updates and returns the updated complex Element parsed from tree_to_update.
:param tree_to_update: the XML tree compatible with element_utils to be updated
:param xpath_root: the XPATH location of the root of the complex Element
:param xpath_map: a Dictionary of XPATHs corresponding to the complex structure definition
:param prop: the property identifying the complex structure to be serialized
:param values: a Dictionary representing the complex structure to be updated
"""
remove_element(tree_to_update, xpath_root, True)
values = reduce_value(values, {})
if not values:
# Returns the elements corresponding to property removed from the tree
updated = update_property(tree_to_update, xpath_root, xpath_root, prop, values) # depends on [control=['if'], data=[]]
else:
for (subprop, value) in iteritems(values):
xpath = xpath_map[subprop]
value = get_default_for_complex_sub(prop, subprop, value, xpath)
update_property(tree_to_update, None, xpath, subprop, value) # depends on [control=['for'], data=[]]
updated = get_element(tree_to_update, xpath_root)
return updated |
def tn_max(tasmin, freq='YS'):
r"""Highest minimum temperature.
The maximum of daily minimum temperature.
Parameters
----------
tasmin : xarray.DataArray
Minimum daily temperature [℃] or [K]
freq : str, optional
Resampling frequency
Returns
-------
xarray.DataArray
Maximum of daily minimum temperature.
Notes
-----
Let :math:`TN_{ij}` be the minimum temperature at day :math:`i` of period :math:`j`. Then the maximum
daily minimum temperature for period :math:`j` is:
.. math::
TNx_j = max(TN_{ij})
"""
return tasmin.resample(time=freq).max(dim='time', keep_attrs=True) | def function[tn_max, parameter[tasmin, freq]]:
constant[Highest minimum temperature.
The maximum of daily minimum temperature.
Parameters
----------
tasmin : xarray.DataArray
Minimum daily temperature [℃] or [K]
freq : str, optional
Resampling frequency
Returns
-------
xarray.DataArray
Maximum of daily minimum temperature.
Notes
-----
Let :math:`TN_{ij}` be the minimum temperature at day :math:`i` of period :math:`j`. Then the maximum
daily minimum temperature for period :math:`j` is:
.. math::
TNx_j = max(TN_{ij})
]
return[call[call[name[tasmin].resample, parameter[]].max, parameter[]]] | keyword[def] identifier[tn_max] ( identifier[tasmin] , identifier[freq] = literal[string] ):
literal[string]
keyword[return] identifier[tasmin] . identifier[resample] ( identifier[time] = identifier[freq] ). identifier[max] ( identifier[dim] = literal[string] , identifier[keep_attrs] = keyword[True] ) | def tn_max(tasmin, freq='YS'):
"""Highest minimum temperature.
The maximum of daily minimum temperature.
Parameters
----------
tasmin : xarray.DataArray
Minimum daily temperature [℃] or [K]
freq : str, optional
Resampling frequency
Returns
-------
xarray.DataArray
Maximum of daily minimum temperature.
Notes
-----
Let :math:`TN_{ij}` be the minimum temperature at day :math:`i` of period :math:`j`. Then the maximum
daily minimum temperature for period :math:`j` is:
.. math::
TNx_j = max(TN_{ij})
"""
return tasmin.resample(time=freq).max(dim='time', keep_attrs=True) |
def impulse_deltav_general_fullplummerintegration(v,x,b,w,x0,v0,galpot,GM,rs,
tmaxfac=10.,N=1000,
integrate_method='symplec4_c'):
"""
NAME:
impulse_deltav_general_fullplummerintegration
PURPOSE:
calculate the delta velocity to due an encounter with a moving Plummer sphere and galactic potential relative to just in galactic potential
INPUT:
v - velocity of the stream (nstar,3)
x - position along the stream (nstar,3)
b - impact parameter
w - velocity of the subhalo (3)
x0 - position of closest approach (3)
v0 - velocity of stream at closest approach (3)
galpot - Galaxy Potential object
GM - mass of Plummer
rs - scale of Plummer
tmaxfac(10) - multiple of rs/fabs(w - v0) to use for time integration interval
N(1000) - number of forward integration points
integrate_method('symplec4_c') - orbit integrator to use (see Orbit.integrate)
OUTPUT:
deltav (nstar,3)
HISTORY:
2015-08-18 - SANDERS
"""
galpot= flatten_potential(galpot)
if len(v.shape) == 1: v= numpy.reshape(v,(1,3))
if len(x.shape) == 1: x= numpy.reshape(x,(1,3))
nstar,ndim=numpy.shape(v)
b0 = numpy.cross(w,v0)
b0 *= b/numpy.sqrt(numpy.sum(b0**2))
X = x0-b0
# Setup Plummer orbit
R, phi, z= bovy_coords.rect_to_cyl(X[0],X[1],X[2])
vR, vp, vz= bovy_coords.rect_to_cyl_vec(w[0],w[1],w[2],R,phi,z,cyl=True)
tmax = tmaxfac*rs/numpy.sqrt(numpy.sum((w-v0)**2))
times = numpy.linspace(0.,tmax,N)
dtimes = numpy.linspace(-tmax,tmax,2*N)
o = Orbit(vxvv=[R,-vR,-vp,z,-vz,phi])
o.integrate(times,galpot,method=integrate_method)
oplum = o(times[-1]).flip()
oplum.integrate(dtimes,galpot,method=integrate_method)
plumpot = MovingObjectPotential(orbit=oplum,
pot=PlummerPotential(amp=GM,b=rs))
# Now integrate each particle backwards in galaxy potential, forwards in combined potential and backwards again in galaxy and take diff
deltav = numpy.zeros((nstar,3))
R, phi, z= bovy_coords.rect_to_cyl(x[:,0],x[:,1],x[:,2])
vR, vp, vz= bovy_coords.rect_to_cyl_vec(v[:,0],v[:,1],v[:,2],
R,phi,z,cyl=True)
for i in range(nstar):
ostar= Orbit(vxvv=[R[i],-vR[i],-vp[i],z[i],-vz[i],phi[i]])
ostar.integrate(times,galpot,method=integrate_method)
oboth = ostar(times[-1]).flip()
oboth.integrate(dtimes,[galpot,plumpot],method=integrate_method)
ogalpot = oboth(times[-1]).flip()
ogalpot.integrate(times,galpot,method=integrate_method)
deltav[i][0] = -ogalpot.vx(times[-1]) - v[i][0]
deltav[i][1] = -ogalpot.vy(times[-1]) - v[i][1]
deltav[i][2] = -ogalpot.vz(times[-1]) - v[i][2]
return deltav | def function[impulse_deltav_general_fullplummerintegration, parameter[v, x, b, w, x0, v0, galpot, GM, rs, tmaxfac, N, integrate_method]]:
constant[
NAME:
impulse_deltav_general_fullplummerintegration
PURPOSE:
calculate the delta velocity to due an encounter with a moving Plummer sphere and galactic potential relative to just in galactic potential
INPUT:
v - velocity of the stream (nstar,3)
x - position along the stream (nstar,3)
b - impact parameter
w - velocity of the subhalo (3)
x0 - position of closest approach (3)
v0 - velocity of stream at closest approach (3)
galpot - Galaxy Potential object
GM - mass of Plummer
rs - scale of Plummer
tmaxfac(10) - multiple of rs/fabs(w - v0) to use for time integration interval
N(1000) - number of forward integration points
integrate_method('symplec4_c') - orbit integrator to use (see Orbit.integrate)
OUTPUT:
deltav (nstar,3)
HISTORY:
2015-08-18 - SANDERS
]
variable[galpot] assign[=] call[name[flatten_potential], parameter[name[galpot]]]
if compare[call[name[len], parameter[name[v].shape]] equal[==] constant[1]] begin[:]
variable[v] assign[=] call[name[numpy].reshape, parameter[name[v], tuple[[<ast.Constant object at 0x7da18bcc8ac0>, <ast.Constant object at 0x7da18bccaa10>]]]]
if compare[call[name[len], parameter[name[x].shape]] equal[==] constant[1]] begin[:]
variable[x] assign[=] call[name[numpy].reshape, parameter[name[x], tuple[[<ast.Constant object at 0x7da18bcc9b10>, <ast.Constant object at 0x7da18bccbc10>]]]]
<ast.Tuple object at 0x7da18bccbeb0> assign[=] call[name[numpy].shape, parameter[name[v]]]
variable[b0] assign[=] call[name[numpy].cross, parameter[name[w], name[v0]]]
<ast.AugAssign object at 0x7da18bcc8160>
variable[X] assign[=] binary_operation[name[x0] - name[b0]]
<ast.Tuple object at 0x7da18bcc90f0> assign[=] call[name[bovy_coords].rect_to_cyl, parameter[call[name[X]][constant[0]], call[name[X]][constant[1]], call[name[X]][constant[2]]]]
<ast.Tuple object at 0x7da18bccad40> assign[=] call[name[bovy_coords].rect_to_cyl_vec, parameter[call[name[w]][constant[0]], call[name[w]][constant[1]], call[name[w]][constant[2]], name[R], name[phi], name[z]]]
variable[tmax] assign[=] binary_operation[binary_operation[name[tmaxfac] * name[rs]] / call[name[numpy].sqrt, parameter[call[name[numpy].sum, parameter[binary_operation[binary_operation[name[w] - name[v0]] ** constant[2]]]]]]]
variable[times] assign[=] call[name[numpy].linspace, parameter[constant[0.0], name[tmax], name[N]]]
variable[dtimes] assign[=] call[name[numpy].linspace, parameter[<ast.UnaryOp object at 0x7da18bccb8e0>, name[tmax], binary_operation[constant[2] * name[N]]]]
variable[o] assign[=] call[name[Orbit], parameter[]]
call[name[o].integrate, parameter[name[times], name[galpot]]]
variable[oplum] assign[=] call[call[name[o], parameter[call[name[times]][<ast.UnaryOp object at 0x7da18bcc88e0>]]].flip, parameter[]]
call[name[oplum].integrate, parameter[name[dtimes], name[galpot]]]
variable[plumpot] assign[=] call[name[MovingObjectPotential], parameter[]]
variable[deltav] assign[=] call[name[numpy].zeros, parameter[tuple[[<ast.Name object at 0x7da18bccb760>, <ast.Constant object at 0x7da18bcc9c30>]]]]
<ast.Tuple object at 0x7da18bcca500> assign[=] call[name[bovy_coords].rect_to_cyl, parameter[call[name[x]][tuple[[<ast.Slice object at 0x7da18bcca890>, <ast.Constant object at 0x7da18bcc9570>]]], call[name[x]][tuple[[<ast.Slice object at 0x7da18bccb2b0>, <ast.Constant object at 0x7da18bcc8520>]]], call[name[x]][tuple[[<ast.Slice object at 0x7da18bcc9870>, <ast.Constant object at 0x7da18bcc9a20>]]]]]
<ast.Tuple object at 0x7da18bccbb20> assign[=] call[name[bovy_coords].rect_to_cyl_vec, parameter[call[name[v]][tuple[[<ast.Slice object at 0x7da18bccb5e0>, <ast.Constant object at 0x7da1b0b09cf0>]]], call[name[v]][tuple[[<ast.Slice object at 0x7da1b0b09390>, <ast.Constant object at 0x7da1b0b093f0>]]], call[name[v]][tuple[[<ast.Slice object at 0x7da1b0b090c0>, <ast.Constant object at 0x7da1b0b09150>]]], name[R], name[phi], name[z]]]
for taget[name[i]] in starred[call[name[range], parameter[name[nstar]]]] begin[:]
variable[ostar] assign[=] call[name[Orbit], parameter[]]
call[name[ostar].integrate, parameter[name[times], name[galpot]]]
variable[oboth] assign[=] call[call[name[ostar], parameter[call[name[times]][<ast.UnaryOp object at 0x7da1b0b08b50>]]].flip, parameter[]]
call[name[oboth].integrate, parameter[name[dtimes], list[[<ast.Name object at 0x7da1b0b09de0>, <ast.Name object at 0x7da1b0b08df0>]]]]
variable[ogalpot] assign[=] call[call[name[oboth], parameter[call[name[times]][<ast.UnaryOp object at 0x7da1b0b09db0>]]].flip, parameter[]]
call[name[ogalpot].integrate, parameter[name[times], name[galpot]]]
call[call[name[deltav]][name[i]]][constant[0]] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b0b09ba0> - call[call[name[v]][name[i]]][constant[0]]]
call[call[name[deltav]][name[i]]][constant[1]] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b0b08eb0> - call[call[name[v]][name[i]]][constant[1]]]
call[call[name[deltav]][name[i]]][constant[2]] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b0b0abf0> - call[call[name[v]][name[i]]][constant[2]]]
return[name[deltav]] | keyword[def] identifier[impulse_deltav_general_fullplummerintegration] ( identifier[v] , identifier[x] , identifier[b] , identifier[w] , identifier[x0] , identifier[v0] , identifier[galpot] , identifier[GM] , identifier[rs] ,
identifier[tmaxfac] = literal[int] , identifier[N] = literal[int] ,
identifier[integrate_method] = literal[string] ):
literal[string]
identifier[galpot] = identifier[flatten_potential] ( identifier[galpot] )
keyword[if] identifier[len] ( identifier[v] . identifier[shape] )== literal[int] : identifier[v] = identifier[numpy] . identifier[reshape] ( identifier[v] ,( literal[int] , literal[int] ))
keyword[if] identifier[len] ( identifier[x] . identifier[shape] )== literal[int] : identifier[x] = identifier[numpy] . identifier[reshape] ( identifier[x] ,( literal[int] , literal[int] ))
identifier[nstar] , identifier[ndim] = identifier[numpy] . identifier[shape] ( identifier[v] )
identifier[b0] = identifier[numpy] . identifier[cross] ( identifier[w] , identifier[v0] )
identifier[b0] *= identifier[b] / identifier[numpy] . identifier[sqrt] ( identifier[numpy] . identifier[sum] ( identifier[b0] ** literal[int] ))
identifier[X] = identifier[x0] - identifier[b0]
identifier[R] , identifier[phi] , identifier[z] = identifier[bovy_coords] . identifier[rect_to_cyl] ( identifier[X] [ literal[int] ], identifier[X] [ literal[int] ], identifier[X] [ literal[int] ])
identifier[vR] , identifier[vp] , identifier[vz] = identifier[bovy_coords] . identifier[rect_to_cyl_vec] ( identifier[w] [ literal[int] ], identifier[w] [ literal[int] ], identifier[w] [ literal[int] ], identifier[R] , identifier[phi] , identifier[z] , identifier[cyl] = keyword[True] )
identifier[tmax] = identifier[tmaxfac] * identifier[rs] / identifier[numpy] . identifier[sqrt] ( identifier[numpy] . identifier[sum] (( identifier[w] - identifier[v0] )** literal[int] ))
identifier[times] = identifier[numpy] . identifier[linspace] ( literal[int] , identifier[tmax] , identifier[N] )
identifier[dtimes] = identifier[numpy] . identifier[linspace] (- identifier[tmax] , identifier[tmax] , literal[int] * identifier[N] )
identifier[o] = identifier[Orbit] ( identifier[vxvv] =[ identifier[R] ,- identifier[vR] ,- identifier[vp] , identifier[z] ,- identifier[vz] , identifier[phi] ])
identifier[o] . identifier[integrate] ( identifier[times] , identifier[galpot] , identifier[method] = identifier[integrate_method] )
identifier[oplum] = identifier[o] ( identifier[times] [- literal[int] ]). identifier[flip] ()
identifier[oplum] . identifier[integrate] ( identifier[dtimes] , identifier[galpot] , identifier[method] = identifier[integrate_method] )
identifier[plumpot] = identifier[MovingObjectPotential] ( identifier[orbit] = identifier[oplum] ,
identifier[pot] = identifier[PlummerPotential] ( identifier[amp] = identifier[GM] , identifier[b] = identifier[rs] ))
identifier[deltav] = identifier[numpy] . identifier[zeros] (( identifier[nstar] , literal[int] ))
identifier[R] , identifier[phi] , identifier[z] = identifier[bovy_coords] . identifier[rect_to_cyl] ( identifier[x] [:, literal[int] ], identifier[x] [:, literal[int] ], identifier[x] [:, literal[int] ])
identifier[vR] , identifier[vp] , identifier[vz] = identifier[bovy_coords] . identifier[rect_to_cyl_vec] ( identifier[v] [:, literal[int] ], identifier[v] [:, literal[int] ], identifier[v] [:, literal[int] ],
identifier[R] , identifier[phi] , identifier[z] , identifier[cyl] = keyword[True] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nstar] ):
identifier[ostar] = identifier[Orbit] ( identifier[vxvv] =[ identifier[R] [ identifier[i] ],- identifier[vR] [ identifier[i] ],- identifier[vp] [ identifier[i] ], identifier[z] [ identifier[i] ],- identifier[vz] [ identifier[i] ], identifier[phi] [ identifier[i] ]])
identifier[ostar] . identifier[integrate] ( identifier[times] , identifier[galpot] , identifier[method] = identifier[integrate_method] )
identifier[oboth] = identifier[ostar] ( identifier[times] [- literal[int] ]). identifier[flip] ()
identifier[oboth] . identifier[integrate] ( identifier[dtimes] ,[ identifier[galpot] , identifier[plumpot] ], identifier[method] = identifier[integrate_method] )
identifier[ogalpot] = identifier[oboth] ( identifier[times] [- literal[int] ]). identifier[flip] ()
identifier[ogalpot] . identifier[integrate] ( identifier[times] , identifier[galpot] , identifier[method] = identifier[integrate_method] )
identifier[deltav] [ identifier[i] ][ literal[int] ]=- identifier[ogalpot] . identifier[vx] ( identifier[times] [- literal[int] ])- identifier[v] [ identifier[i] ][ literal[int] ]
identifier[deltav] [ identifier[i] ][ literal[int] ]=- identifier[ogalpot] . identifier[vy] ( identifier[times] [- literal[int] ])- identifier[v] [ identifier[i] ][ literal[int] ]
identifier[deltav] [ identifier[i] ][ literal[int] ]=- identifier[ogalpot] . identifier[vz] ( identifier[times] [- literal[int] ])- identifier[v] [ identifier[i] ][ literal[int] ]
keyword[return] identifier[deltav] | def impulse_deltav_general_fullplummerintegration(v, x, b, w, x0, v0, galpot, GM, rs, tmaxfac=10.0, N=1000, integrate_method='symplec4_c'):
"""
NAME:
impulse_deltav_general_fullplummerintegration
PURPOSE:
calculate the delta velocity to due an encounter with a moving Plummer sphere and galactic potential relative to just in galactic potential
INPUT:
v - velocity of the stream (nstar,3)
x - position along the stream (nstar,3)
b - impact parameter
w - velocity of the subhalo (3)
x0 - position of closest approach (3)
v0 - velocity of stream at closest approach (3)
galpot - Galaxy Potential object
GM - mass of Plummer
rs - scale of Plummer
tmaxfac(10) - multiple of rs/fabs(w - v0) to use for time integration interval
N(1000) - number of forward integration points
integrate_method('symplec4_c') - orbit integrator to use (see Orbit.integrate)
OUTPUT:
deltav (nstar,3)
HISTORY:
2015-08-18 - SANDERS
"""
galpot = flatten_potential(galpot)
if len(v.shape) == 1:
v = numpy.reshape(v, (1, 3)) # depends on [control=['if'], data=[]]
if len(x.shape) == 1:
x = numpy.reshape(x, (1, 3)) # depends on [control=['if'], data=[]]
(nstar, ndim) = numpy.shape(v)
b0 = numpy.cross(w, v0)
b0 *= b / numpy.sqrt(numpy.sum(b0 ** 2))
X = x0 - b0
# Setup Plummer orbit
(R, phi, z) = bovy_coords.rect_to_cyl(X[0], X[1], X[2])
(vR, vp, vz) = bovy_coords.rect_to_cyl_vec(w[0], w[1], w[2], R, phi, z, cyl=True)
tmax = tmaxfac * rs / numpy.sqrt(numpy.sum((w - v0) ** 2))
times = numpy.linspace(0.0, tmax, N)
dtimes = numpy.linspace(-tmax, tmax, 2 * N)
o = Orbit(vxvv=[R, -vR, -vp, z, -vz, phi])
o.integrate(times, galpot, method=integrate_method)
oplum = o(times[-1]).flip()
oplum.integrate(dtimes, galpot, method=integrate_method)
plumpot = MovingObjectPotential(orbit=oplum, pot=PlummerPotential(amp=GM, b=rs))
# Now integrate each particle backwards in galaxy potential, forwards in combined potential and backwards again in galaxy and take diff
deltav = numpy.zeros((nstar, 3))
(R, phi, z) = bovy_coords.rect_to_cyl(x[:, 0], x[:, 1], x[:, 2])
(vR, vp, vz) = bovy_coords.rect_to_cyl_vec(v[:, 0], v[:, 1], v[:, 2], R, phi, z, cyl=True)
for i in range(nstar):
ostar = Orbit(vxvv=[R[i], -vR[i], -vp[i], z[i], -vz[i], phi[i]])
ostar.integrate(times, galpot, method=integrate_method)
oboth = ostar(times[-1]).flip()
oboth.integrate(dtimes, [galpot, plumpot], method=integrate_method)
ogalpot = oboth(times[-1]).flip()
ogalpot.integrate(times, galpot, method=integrate_method)
deltav[i][0] = -ogalpot.vx(times[-1]) - v[i][0]
deltav[i][1] = -ogalpot.vy(times[-1]) - v[i][1]
deltav[i][2] = -ogalpot.vz(times[-1]) - v[i][2] # depends on [control=['for'], data=['i']]
return deltav |
def disconnect_sync(self, conn_id):
"""Synchronously disconnect from a connected device
Args:
conn_id (int): A unique identifier that will refer to this connection
Returns:
dict: A dictionary with two elements
'success': a bool with the result of the connection attempt
'failure_reason': a string with the reason for the failure if we failed
"""
done = threading.Event()
result = {}
def disconnect_done(conn_id, adapter_id, status, reason):
result['success'] = status
result['failure_reason'] = reason
done.set()
self.disconnect_async(conn_id, disconnect_done)
done.wait()
return result | def function[disconnect_sync, parameter[self, conn_id]]:
constant[Synchronously disconnect from a connected device
Args:
conn_id (int): A unique identifier that will refer to this connection
Returns:
dict: A dictionary with two elements
'success': a bool with the result of the connection attempt
'failure_reason': a string with the reason for the failure if we failed
]
variable[done] assign[=] call[name[threading].Event, parameter[]]
variable[result] assign[=] dictionary[[], []]
def function[disconnect_done, parameter[conn_id, adapter_id, status, reason]]:
call[name[result]][constant[success]] assign[=] name[status]
call[name[result]][constant[failure_reason]] assign[=] name[reason]
call[name[done].set, parameter[]]
call[name[self].disconnect_async, parameter[name[conn_id], name[disconnect_done]]]
call[name[done].wait, parameter[]]
return[name[result]] | keyword[def] identifier[disconnect_sync] ( identifier[self] , identifier[conn_id] ):
literal[string]
identifier[done] = identifier[threading] . identifier[Event] ()
identifier[result] ={}
keyword[def] identifier[disconnect_done] ( identifier[conn_id] , identifier[adapter_id] , identifier[status] , identifier[reason] ):
identifier[result] [ literal[string] ]= identifier[status]
identifier[result] [ literal[string] ]= identifier[reason]
identifier[done] . identifier[set] ()
identifier[self] . identifier[disconnect_async] ( identifier[conn_id] , identifier[disconnect_done] )
identifier[done] . identifier[wait] ()
keyword[return] identifier[result] | def disconnect_sync(self, conn_id):
"""Synchronously disconnect from a connected device
Args:
conn_id (int): A unique identifier that will refer to this connection
Returns:
dict: A dictionary with two elements
'success': a bool with the result of the connection attempt
'failure_reason': a string with the reason for the failure if we failed
"""
done = threading.Event()
result = {}
def disconnect_done(conn_id, adapter_id, status, reason):
result['success'] = status
result['failure_reason'] = reason
done.set()
self.disconnect_async(conn_id, disconnect_done)
done.wait()
return result |
def build_header(
filename, disposition='attachment', filename_compat=None
):
"""Generate a Content-Disposition header for a given filename.
For legacy clients that don't understand the filename* parameter,
a filename_compat value may be given.
It should either be ascii-only (recommended) or iso-8859-1 only.
In the later case it should be a character string
(unicode in Python 2).
Options for generating filename_compat (only useful for legacy clients):
- ignore (will only send filename*);
- strip accents using unicode's decomposing normalisations,
which can be done from unicode data (stdlib), and keep only ascii;
- use the ascii transliteration tables from Unidecode (PyPI);
- use iso-8859-1
Ignore is the safest, and can be used to trigger a fallback
to the document location (which can be percent-encoded utf-8
if you control the URLs).
See https://tools.ietf.org/html/rfc6266#appendix-D
"""
# While this method exists, it could also sanitize the filename
# by rejecting slashes or other weirdness that might upset a receiver.
if disposition != 'attachment':
assert is_token(disposition)
rv = disposition
if is_token(filename):
rv += '; filename=%s' % (filename, )
return rv
elif is_ascii(filename) and is_lws_safe(filename):
qd_filename = qd_quote(filename)
rv += '; filename="%s"' % (qd_filename, )
if qd_filename == filename:
# RFC 6266 claims some implementations are iffy on qdtext's
# backslash-escaping, we'll include filename* in that case.
return rv
elif filename_compat:
if is_token(filename_compat):
rv += '; filename=%s' % (filename_compat, )
else:
assert is_lws_safe(filename_compat)
rv += '; filename="%s"' % (qd_quote(filename_compat), )
# alnum are already considered always-safe, but the rest isn't.
# Python encodes ~ when it shouldn't, for example.
rv += "; filename*=utf-8''%s" % (percent_encode(
filename, safe=attr_chars_nonalnum, encoding='utf-8'), )
# This will only encode filename_compat, if it used non-ascii iso-8859-1.
return rv.encode('iso-8859-1') | def function[build_header, parameter[filename, disposition, filename_compat]]:
constant[Generate a Content-Disposition header for a given filename.
For legacy clients that don't understand the filename* parameter,
a filename_compat value may be given.
It should either be ascii-only (recommended) or iso-8859-1 only.
In the later case it should be a character string
(unicode in Python 2).
Options for generating filename_compat (only useful for legacy clients):
- ignore (will only send filename*);
- strip accents using unicode's decomposing normalisations,
which can be done from unicode data (stdlib), and keep only ascii;
- use the ascii transliteration tables from Unidecode (PyPI);
- use iso-8859-1
Ignore is the safest, and can be used to trigger a fallback
to the document location (which can be percent-encoded utf-8
if you control the URLs).
See https://tools.ietf.org/html/rfc6266#appendix-D
]
if compare[name[disposition] not_equal[!=] constant[attachment]] begin[:]
assert[call[name[is_token], parameter[name[disposition]]]]
variable[rv] assign[=] name[disposition]
if call[name[is_token], parameter[name[filename]]] begin[:]
<ast.AugAssign object at 0x7da1b101d780>
return[name[rv]]
<ast.AugAssign object at 0x7da1b11dba30>
return[call[name[rv].encode, parameter[constant[iso-8859-1]]]] | keyword[def] identifier[build_header] (
identifier[filename] , identifier[disposition] = literal[string] , identifier[filename_compat] = keyword[None]
):
literal[string]
keyword[if] identifier[disposition] != literal[string] :
keyword[assert] identifier[is_token] ( identifier[disposition] )
identifier[rv] = identifier[disposition]
keyword[if] identifier[is_token] ( identifier[filename] ):
identifier[rv] += literal[string] %( identifier[filename] ,)
keyword[return] identifier[rv]
keyword[elif] identifier[is_ascii] ( identifier[filename] ) keyword[and] identifier[is_lws_safe] ( identifier[filename] ):
identifier[qd_filename] = identifier[qd_quote] ( identifier[filename] )
identifier[rv] += literal[string] %( identifier[qd_filename] ,)
keyword[if] identifier[qd_filename] == identifier[filename] :
keyword[return] identifier[rv]
keyword[elif] identifier[filename_compat] :
keyword[if] identifier[is_token] ( identifier[filename_compat] ):
identifier[rv] += literal[string] %( identifier[filename_compat] ,)
keyword[else] :
keyword[assert] identifier[is_lws_safe] ( identifier[filename_compat] )
identifier[rv] += literal[string] %( identifier[qd_quote] ( identifier[filename_compat] ),)
identifier[rv] += literal[string] %( identifier[percent_encode] (
identifier[filename] , identifier[safe] = identifier[attr_chars_nonalnum] , identifier[encoding] = literal[string] ),)
keyword[return] identifier[rv] . identifier[encode] ( literal[string] ) | def build_header(filename, disposition='attachment', filename_compat=None):
"""Generate a Content-Disposition header for a given filename.
For legacy clients that don't understand the filename* parameter,
a filename_compat value may be given.
It should either be ascii-only (recommended) or iso-8859-1 only.
In the later case it should be a character string
(unicode in Python 2).
Options for generating filename_compat (only useful for legacy clients):
- ignore (will only send filename*);
- strip accents using unicode's decomposing normalisations,
which can be done from unicode data (stdlib), and keep only ascii;
- use the ascii transliteration tables from Unidecode (PyPI);
- use iso-8859-1
Ignore is the safest, and can be used to trigger a fallback
to the document location (which can be percent-encoded utf-8
if you control the URLs).
See https://tools.ietf.org/html/rfc6266#appendix-D
"""
# While this method exists, it could also sanitize the filename
# by rejecting slashes or other weirdness that might upset a receiver.
if disposition != 'attachment':
assert is_token(disposition) # depends on [control=['if'], data=['disposition']]
rv = disposition
if is_token(filename):
rv += '; filename=%s' % (filename,)
return rv # depends on [control=['if'], data=[]]
elif is_ascii(filename) and is_lws_safe(filename):
qd_filename = qd_quote(filename)
rv += '; filename="%s"' % (qd_filename,)
if qd_filename == filename:
# RFC 6266 claims some implementations are iffy on qdtext's
# backslash-escaping, we'll include filename* in that case.
return rv # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif filename_compat:
if is_token(filename_compat):
rv += '; filename=%s' % (filename_compat,) # depends on [control=['if'], data=[]]
else:
assert is_lws_safe(filename_compat)
rv += '; filename="%s"' % (qd_quote(filename_compat),) # depends on [control=['if'], data=[]]
# alnum are already considered always-safe, but the rest isn't.
# Python encodes ~ when it shouldn't, for example.
rv += "; filename*=utf-8''%s" % (percent_encode(filename, safe=attr_chars_nonalnum, encoding='utf-8'),)
# This will only encode filename_compat, if it used non-ascii iso-8859-1.
return rv.encode('iso-8859-1') |
def _set_formatter(self):
"""
Inspects config and sets the name of the formatter to either "json" or "text"
as instance attr. If not present in config, default is "text"
"""
if hasattr(self._config, "formatter") and self._config.formatter == "json":
self._formatter = "json"
else:
self._formatter = "text" | def function[_set_formatter, parameter[self]]:
constant[
Inspects config and sets the name of the formatter to either "json" or "text"
as instance attr. If not present in config, default is "text"
]
if <ast.BoolOp object at 0x7da1b1496560> begin[:]
name[self]._formatter assign[=] constant[json] | keyword[def] identifier[_set_formatter] ( identifier[self] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] . identifier[_config] , literal[string] ) keyword[and] identifier[self] . identifier[_config] . identifier[formatter] == literal[string] :
identifier[self] . identifier[_formatter] = literal[string]
keyword[else] :
identifier[self] . identifier[_formatter] = literal[string] | def _set_formatter(self):
"""
Inspects config and sets the name of the formatter to either "json" or "text"
as instance attr. If not present in config, default is "text"
"""
if hasattr(self._config, 'formatter') and self._config.formatter == 'json':
self._formatter = 'json' # depends on [control=['if'], data=[]]
else:
self._formatter = 'text' |
def delete_item(self, item):
''' removes an item from the db '''
for relation, dst in self.relations_of(item, True):
self.delete_relation(item, relation, dst)
#print(item, relation, dst)
for src, relation in self.relations_to(item, True):
self.delete_relation(src, relation, item)
#print(src, relation, item)
h = self._item_hash(item)
if item in self:
#print('deleting item:', item)
self.nodes[h].clear()
del self.nodes[h] | def function[delete_item, parameter[self, item]]:
constant[ removes an item from the db ]
for taget[tuple[[<ast.Name object at 0x7da20c76ece0>, <ast.Name object at 0x7da20c76c970>]]] in starred[call[name[self].relations_of, parameter[name[item], constant[True]]]] begin[:]
call[name[self].delete_relation, parameter[name[item], name[relation], name[dst]]]
for taget[tuple[[<ast.Name object at 0x7da20c76f5e0>, <ast.Name object at 0x7da20c76eb00>]]] in starred[call[name[self].relations_to, parameter[name[item], constant[True]]]] begin[:]
call[name[self].delete_relation, parameter[name[src], name[relation], name[item]]]
variable[h] assign[=] call[name[self]._item_hash, parameter[name[item]]]
if compare[name[item] in name[self]] begin[:]
call[call[name[self].nodes][name[h]].clear, parameter[]]
<ast.Delete object at 0x7da20c76ebc0> | keyword[def] identifier[delete_item] ( identifier[self] , identifier[item] ):
literal[string]
keyword[for] identifier[relation] , identifier[dst] keyword[in] identifier[self] . identifier[relations_of] ( identifier[item] , keyword[True] ):
identifier[self] . identifier[delete_relation] ( identifier[item] , identifier[relation] , identifier[dst] )
keyword[for] identifier[src] , identifier[relation] keyword[in] identifier[self] . identifier[relations_to] ( identifier[item] , keyword[True] ):
identifier[self] . identifier[delete_relation] ( identifier[src] , identifier[relation] , identifier[item] )
identifier[h] = identifier[self] . identifier[_item_hash] ( identifier[item] )
keyword[if] identifier[item] keyword[in] identifier[self] :
identifier[self] . identifier[nodes] [ identifier[h] ]. identifier[clear] ()
keyword[del] identifier[self] . identifier[nodes] [ identifier[h] ] | def delete_item(self, item):
""" removes an item from the db """
for (relation, dst) in self.relations_of(item, True):
self.delete_relation(item, relation, dst) # depends on [control=['for'], data=[]]
#print(item, relation, dst)
for (src, relation) in self.relations_to(item, True):
self.delete_relation(src, relation, item) # depends on [control=['for'], data=[]]
#print(src, relation, item)
h = self._item_hash(item)
if item in self:
#print('deleting item:', item)
self.nodes[h].clear()
del self.nodes[h] # depends on [control=['if'], data=['self']] |
def transform(self, X, lenscale=None):
"""
Apply the random basis to X.
Parameters
----------
X: ndarray
(N, d) array of observations where N is the number of samples, and
d is the dimensionality of X.
lenscale: scalar or ndarray, optional
scalar or array of shape (d,) length scales (one for each dimension
of X). If not input, this uses the value of the initial length
scale.
Returns
-------
ndarray:
of shape (N, 2*nbases) where nbases is number of random bases to
use, given in the constructor.
"""
N, D = X.shape
lenscale = self._check_dim(D, lenscale)[:, np.newaxis]
WX = np.dot(X, self.W / lenscale)
return np.hstack((np.cos(WX), np.sin(WX))) / np.sqrt(self.n) | def function[transform, parameter[self, X, lenscale]]:
constant[
Apply the random basis to X.
Parameters
----------
X: ndarray
(N, d) array of observations where N is the number of samples, and
d is the dimensionality of X.
lenscale: scalar or ndarray, optional
scalar or array of shape (d,) length scales (one for each dimension
of X). If not input, this uses the value of the initial length
scale.
Returns
-------
ndarray:
of shape (N, 2*nbases) where nbases is number of random bases to
use, given in the constructor.
]
<ast.Tuple object at 0x7da1b2585c30> assign[=] name[X].shape
variable[lenscale] assign[=] call[call[name[self]._check_dim, parameter[name[D], name[lenscale]]]][tuple[[<ast.Slice object at 0x7da1b2585030>, <ast.Attribute object at 0x7da1b25845e0>]]]
variable[WX] assign[=] call[name[np].dot, parameter[name[X], binary_operation[name[self].W / name[lenscale]]]]
return[binary_operation[call[name[np].hstack, parameter[tuple[[<ast.Call object at 0x7da18c4cd660>, <ast.Call object at 0x7da18c4cd360>]]]] / call[name[np].sqrt, parameter[name[self].n]]]] | keyword[def] identifier[transform] ( identifier[self] , identifier[X] , identifier[lenscale] = keyword[None] ):
literal[string]
identifier[N] , identifier[D] = identifier[X] . identifier[shape]
identifier[lenscale] = identifier[self] . identifier[_check_dim] ( identifier[D] , identifier[lenscale] )[:, identifier[np] . identifier[newaxis] ]
identifier[WX] = identifier[np] . identifier[dot] ( identifier[X] , identifier[self] . identifier[W] / identifier[lenscale] )
keyword[return] identifier[np] . identifier[hstack] (( identifier[np] . identifier[cos] ( identifier[WX] ), identifier[np] . identifier[sin] ( identifier[WX] )))/ identifier[np] . identifier[sqrt] ( identifier[self] . identifier[n] ) | def transform(self, X, lenscale=None):
"""
Apply the random basis to X.
Parameters
----------
X: ndarray
(N, d) array of observations where N is the number of samples, and
d is the dimensionality of X.
lenscale: scalar or ndarray, optional
scalar or array of shape (d,) length scales (one for each dimension
of X). If not input, this uses the value of the initial length
scale.
Returns
-------
ndarray:
of shape (N, 2*nbases) where nbases is number of random bases to
use, given in the constructor.
"""
(N, D) = X.shape
lenscale = self._check_dim(D, lenscale)[:, np.newaxis]
WX = np.dot(X, self.W / lenscale)
return np.hstack((np.cos(WX), np.sin(WX))) / np.sqrt(self.n) |
def block_sep1(self, Y):
r"""Separate variable into component corresponding to
:math:`\mathbf{y}_1` in :math:`\mathbf{y}\;\;`.
"""
# This method is overridden because we have to change the
# mechanism for combining the Y0 and Y1 blocks into a single
# array (see comment in the __init__ method).
shp = Y.shape[0:self.cri.axisC] + self.y1shp[self.cri.axisC:]
return Y[(slice(None),)*self.cri.axisC +
(slice(self.y0I, None),)].reshape(shp) | def function[block_sep1, parameter[self, Y]]:
constant[Separate variable into component corresponding to
:math:`\mathbf{y}_1` in :math:`\mathbf{y}\;\;`.
]
variable[shp] assign[=] binary_operation[call[name[Y].shape][<ast.Slice object at 0x7da1b06d2800>] + call[name[self].y1shp][<ast.Slice object at 0x7da1b06d2410>]]
return[call[call[name[Y]][binary_operation[binary_operation[tuple[[<ast.Call object at 0x7da1b06d2ef0>]] * name[self].cri.axisC] + tuple[[<ast.Call object at 0x7da1b06d3070>]]]].reshape, parameter[name[shp]]]] | keyword[def] identifier[block_sep1] ( identifier[self] , identifier[Y] ):
literal[string]
identifier[shp] = identifier[Y] . identifier[shape] [ literal[int] : identifier[self] . identifier[cri] . identifier[axisC] ]+ identifier[self] . identifier[y1shp] [ identifier[self] . identifier[cri] . identifier[axisC] :]
keyword[return] identifier[Y] [( identifier[slice] ( keyword[None] ),)* identifier[self] . identifier[cri] . identifier[axisC] +
( identifier[slice] ( identifier[self] . identifier[y0I] , keyword[None] ),)]. identifier[reshape] ( identifier[shp] ) | def block_sep1(self, Y):
"""Separate variable into component corresponding to
:math:`\\mathbf{y}_1` in :math:`\\mathbf{y}\\;\\;`.
"""
# This method is overridden because we have to change the
# mechanism for combining the Y0 and Y1 blocks into a single
# array (see comment in the __init__ method).
shp = Y.shape[0:self.cri.axisC] + self.y1shp[self.cri.axisC:]
return Y[(slice(None),) * self.cri.axisC + (slice(self.y0I, None),)].reshape(shp) |
def cidr2block(cidr):
"""Convert a CIDR notation ip address into a tuple containing the network
block start and end addresses.
>>> cidr2block('127.0.0.1/32')
('127.0.0.1', '127.0.0.1')
>>> cidr2block('127/8')
('127.0.0.0', '127.255.255.255')
>>> cidr2block('127.0.1/16')
('127.0.0.0', '127.0.255.255')
>>> cidr2block('127.1/24')
('127.1.0.0', '127.1.0.255')
>>> cidr2block('127.0.0.3/29')
('127.0.0.0', '127.0.0.7')
>>> cidr2block('127/0')
('0.0.0.0', '255.255.255.255')
:param cidr: CIDR notation ip address (eg. '127.0.0.1/8').
:type cidr: str
:returns: Tuple of block (start, end) or ``None`` if invalid.
:raises: TypeError
"""
if not validate_cidr(cidr):
return None
ip, prefix = cidr.split('/')
prefix = int(prefix)
# convert dotted-quad ip to base network number
network = ip2network(ip)
return _block_from_ip_and_prefix(network, prefix) | def function[cidr2block, parameter[cidr]]:
constant[Convert a CIDR notation ip address into a tuple containing the network
block start and end addresses.
>>> cidr2block('127.0.0.1/32')
('127.0.0.1', '127.0.0.1')
>>> cidr2block('127/8')
('127.0.0.0', '127.255.255.255')
>>> cidr2block('127.0.1/16')
('127.0.0.0', '127.0.255.255')
>>> cidr2block('127.1/24')
('127.1.0.0', '127.1.0.255')
>>> cidr2block('127.0.0.3/29')
('127.0.0.0', '127.0.0.7')
>>> cidr2block('127/0')
('0.0.0.0', '255.255.255.255')
:param cidr: CIDR notation ip address (eg. '127.0.0.1/8').
:type cidr: str
:returns: Tuple of block (start, end) or ``None`` if invalid.
:raises: TypeError
]
if <ast.UnaryOp object at 0x7da18bc70b20> begin[:]
return[constant[None]]
<ast.Tuple object at 0x7da18bc73520> assign[=] call[name[cidr].split, parameter[constant[/]]]
variable[prefix] assign[=] call[name[int], parameter[name[prefix]]]
variable[network] assign[=] call[name[ip2network], parameter[name[ip]]]
return[call[name[_block_from_ip_and_prefix], parameter[name[network], name[prefix]]]] | keyword[def] identifier[cidr2block] ( identifier[cidr] ):
literal[string]
keyword[if] keyword[not] identifier[validate_cidr] ( identifier[cidr] ):
keyword[return] keyword[None]
identifier[ip] , identifier[prefix] = identifier[cidr] . identifier[split] ( literal[string] )
identifier[prefix] = identifier[int] ( identifier[prefix] )
identifier[network] = identifier[ip2network] ( identifier[ip] )
keyword[return] identifier[_block_from_ip_and_prefix] ( identifier[network] , identifier[prefix] ) | def cidr2block(cidr):
"""Convert a CIDR notation ip address into a tuple containing the network
block start and end addresses.
>>> cidr2block('127.0.0.1/32')
('127.0.0.1', '127.0.0.1')
>>> cidr2block('127/8')
('127.0.0.0', '127.255.255.255')
>>> cidr2block('127.0.1/16')
('127.0.0.0', '127.0.255.255')
>>> cidr2block('127.1/24')
('127.1.0.0', '127.1.0.255')
>>> cidr2block('127.0.0.3/29')
('127.0.0.0', '127.0.0.7')
>>> cidr2block('127/0')
('0.0.0.0', '255.255.255.255')
:param cidr: CIDR notation ip address (eg. '127.0.0.1/8').
:type cidr: str
:returns: Tuple of block (start, end) or ``None`` if invalid.
:raises: TypeError
"""
if not validate_cidr(cidr):
return None # depends on [control=['if'], data=[]]
(ip, prefix) = cidr.split('/')
prefix = int(prefix)
# convert dotted-quad ip to base network number
network = ip2network(ip)
return _block_from_ip_and_prefix(network, prefix) |
def set_basic_params(
self, check_interval_busy=None,
busy_max=None, busy_min=None,
idle_cycles_max=None, idle_cycles_penalty=None,
verbose=None):
"""
:param int check_interval_busy: Interval (sec) to check worker busyness.
:param int busy_max: Maximum busyness (percents). Every time the calculated busyness
is higher than this value, uWSGI will spawn new workers. Default: 50.
:param int busy_min: Minimum busyness (percents). If busyness is below this value,
the app is considered in an "idle cycle" and uWSGI will start counting them.
Once we reach needed number of idle cycles uWSGI will kill one worker. Default: 25.
:param int idle_cycles_max: This option tells uWSGI how many idle cycles are allowed
before stopping a worker.
:param int idle_cycles_penalty: Number of idle cycles to add to ``idle_cycles_max``
in case worker spawned too early. Default is 1.
:param bool verbose: Enables debug logs for this algo.
"""
self._set('cheaper-overload', check_interval_busy)
self._set('cheaper-busyness-max', busy_max)
self._set('cheaper-busyness-min', busy_min)
self._set('cheaper-busyness-multiplier', idle_cycles_max)
self._set('cheaper-busyness-penalty', idle_cycles_penalty)
self._set('cheaper-busyness-verbose', verbose, cast=bool)
return self._section | def function[set_basic_params, parameter[self, check_interval_busy, busy_max, busy_min, idle_cycles_max, idle_cycles_penalty, verbose]]:
constant[
:param int check_interval_busy: Interval (sec) to check worker busyness.
:param int busy_max: Maximum busyness (percents). Every time the calculated busyness
is higher than this value, uWSGI will spawn new workers. Default: 50.
:param int busy_min: Minimum busyness (percents). If busyness is below this value,
the app is considered in an "idle cycle" and uWSGI will start counting them.
Once we reach needed number of idle cycles uWSGI will kill one worker. Default: 25.
:param int idle_cycles_max: This option tells uWSGI how many idle cycles are allowed
before stopping a worker.
:param int idle_cycles_penalty: Number of idle cycles to add to ``idle_cycles_max``
in case worker spawned too early. Default is 1.
:param bool verbose: Enables debug logs for this algo.
]
call[name[self]._set, parameter[constant[cheaper-overload], name[check_interval_busy]]]
call[name[self]._set, parameter[constant[cheaper-busyness-max], name[busy_max]]]
call[name[self]._set, parameter[constant[cheaper-busyness-min], name[busy_min]]]
call[name[self]._set, parameter[constant[cheaper-busyness-multiplier], name[idle_cycles_max]]]
call[name[self]._set, parameter[constant[cheaper-busyness-penalty], name[idle_cycles_penalty]]]
call[name[self]._set, parameter[constant[cheaper-busyness-verbose], name[verbose]]]
return[name[self]._section] | keyword[def] identifier[set_basic_params] (
identifier[self] , identifier[check_interval_busy] = keyword[None] ,
identifier[busy_max] = keyword[None] , identifier[busy_min] = keyword[None] ,
identifier[idle_cycles_max] = keyword[None] , identifier[idle_cycles_penalty] = keyword[None] ,
identifier[verbose] = keyword[None] ):
literal[string]
identifier[self] . identifier[_set] ( literal[string] , identifier[check_interval_busy] )
identifier[self] . identifier[_set] ( literal[string] , identifier[busy_max] )
identifier[self] . identifier[_set] ( literal[string] , identifier[busy_min] )
identifier[self] . identifier[_set] ( literal[string] , identifier[idle_cycles_max] )
identifier[self] . identifier[_set] ( literal[string] , identifier[idle_cycles_penalty] )
identifier[self] . identifier[_set] ( literal[string] , identifier[verbose] , identifier[cast] = identifier[bool] )
keyword[return] identifier[self] . identifier[_section] | def set_basic_params(self, check_interval_busy=None, busy_max=None, busy_min=None, idle_cycles_max=None, idle_cycles_penalty=None, verbose=None):
"""
:param int check_interval_busy: Interval (sec) to check worker busyness.
:param int busy_max: Maximum busyness (percents). Every time the calculated busyness
is higher than this value, uWSGI will spawn new workers. Default: 50.
:param int busy_min: Minimum busyness (percents). If busyness is below this value,
the app is considered in an "idle cycle" and uWSGI will start counting them.
Once we reach needed number of idle cycles uWSGI will kill one worker. Default: 25.
:param int idle_cycles_max: This option tells uWSGI how many idle cycles are allowed
before stopping a worker.
:param int idle_cycles_penalty: Number of idle cycles to add to ``idle_cycles_max``
in case worker spawned too early. Default is 1.
:param bool verbose: Enables debug logs for this algo.
"""
self._set('cheaper-overload', check_interval_busy)
self._set('cheaper-busyness-max', busy_max)
self._set('cheaper-busyness-min', busy_min)
self._set('cheaper-busyness-multiplier', idle_cycles_max)
self._set('cheaper-busyness-penalty', idle_cycles_penalty)
self._set('cheaper-busyness-verbose', verbose, cast=bool)
return self._section |
def insert_paraphrase_information(germanet_db, wiktionary_files):
'''
Reads in the given GermaNet relation file and inserts its contents
into the given MongoDB database.
Arguments:
- `germanet_db`: a pymongo.database.Database object
- `wiktionary_files`:
'''
num_paraphrases = 0
# cache the lexunits while we work on them
lexunits = {}
for filename in wiktionary_files:
paraphrases = read_paraphrase_file(filename)
num_paraphrases += len(paraphrases)
for paraphrase in paraphrases:
if paraphrase['lexUnitId'] not in lexunits:
lexunits[paraphrase['lexUnitId']] = \
germanet_db.lexunits.find_one(
{'id': paraphrase['lexUnitId']})
lexunit = lexunits[paraphrase['lexUnitId']]
if 'paraphrases' not in lexunit:
lexunit['paraphrases'] = []
lexunit['paraphrases'].append(paraphrase)
for lexunit in lexunits.values():
germanet_db.lexunits.save(lexunit)
print('Inserted {0} wiktionary paraphrases.'.format(num_paraphrases)) | def function[insert_paraphrase_information, parameter[germanet_db, wiktionary_files]]:
constant[
Reads in the given GermaNet relation file and inserts its contents
into the given MongoDB database.
Arguments:
- `germanet_db`: a pymongo.database.Database object
- `wiktionary_files`:
]
variable[num_paraphrases] assign[=] constant[0]
variable[lexunits] assign[=] dictionary[[], []]
for taget[name[filename]] in starred[name[wiktionary_files]] begin[:]
variable[paraphrases] assign[=] call[name[read_paraphrase_file], parameter[name[filename]]]
<ast.AugAssign object at 0x7da1b1041e40>
for taget[name[paraphrase]] in starred[name[paraphrases]] begin[:]
if compare[call[name[paraphrase]][constant[lexUnitId]] <ast.NotIn object at 0x7da2590d7190> name[lexunits]] begin[:]
call[name[lexunits]][call[name[paraphrase]][constant[lexUnitId]]] assign[=] call[name[germanet_db].lexunits.find_one, parameter[dictionary[[<ast.Constant object at 0x7da1b1041b10>], [<ast.Subscript object at 0x7da1b1043fa0>]]]]
variable[lexunit] assign[=] call[name[lexunits]][call[name[paraphrase]][constant[lexUnitId]]]
if compare[constant[paraphrases] <ast.NotIn object at 0x7da2590d7190> name[lexunit]] begin[:]
call[name[lexunit]][constant[paraphrases]] assign[=] list[[]]
call[call[name[lexunit]][constant[paraphrases]].append, parameter[name[paraphrase]]]
for taget[name[lexunit]] in starred[call[name[lexunits].values, parameter[]]] begin[:]
call[name[germanet_db].lexunits.save, parameter[name[lexunit]]]
call[name[print], parameter[call[constant[Inserted {0} wiktionary paraphrases.].format, parameter[name[num_paraphrases]]]]] | keyword[def] identifier[insert_paraphrase_information] ( identifier[germanet_db] , identifier[wiktionary_files] ):
literal[string]
identifier[num_paraphrases] = literal[int]
identifier[lexunits] ={}
keyword[for] identifier[filename] keyword[in] identifier[wiktionary_files] :
identifier[paraphrases] = identifier[read_paraphrase_file] ( identifier[filename] )
identifier[num_paraphrases] += identifier[len] ( identifier[paraphrases] )
keyword[for] identifier[paraphrase] keyword[in] identifier[paraphrases] :
keyword[if] identifier[paraphrase] [ literal[string] ] keyword[not] keyword[in] identifier[lexunits] :
identifier[lexunits] [ identifier[paraphrase] [ literal[string] ]]= identifier[germanet_db] . identifier[lexunits] . identifier[find_one] (
{ literal[string] : identifier[paraphrase] [ literal[string] ]})
identifier[lexunit] = identifier[lexunits] [ identifier[paraphrase] [ literal[string] ]]
keyword[if] literal[string] keyword[not] keyword[in] identifier[lexunit] :
identifier[lexunit] [ literal[string] ]=[]
identifier[lexunit] [ literal[string] ]. identifier[append] ( identifier[paraphrase] )
keyword[for] identifier[lexunit] keyword[in] identifier[lexunits] . identifier[values] ():
identifier[germanet_db] . identifier[lexunits] . identifier[save] ( identifier[lexunit] )
identifier[print] ( literal[string] . identifier[format] ( identifier[num_paraphrases] )) | def insert_paraphrase_information(germanet_db, wiktionary_files):
"""
Reads in the given GermaNet relation file and inserts its contents
into the given MongoDB database.
Arguments:
- `germanet_db`: a pymongo.database.Database object
- `wiktionary_files`:
"""
num_paraphrases = 0
# cache the lexunits while we work on them
lexunits = {}
for filename in wiktionary_files:
paraphrases = read_paraphrase_file(filename)
num_paraphrases += len(paraphrases)
for paraphrase in paraphrases:
if paraphrase['lexUnitId'] not in lexunits:
lexunits[paraphrase['lexUnitId']] = germanet_db.lexunits.find_one({'id': paraphrase['lexUnitId']}) # depends on [control=['if'], data=['lexunits']]
lexunit = lexunits[paraphrase['lexUnitId']]
if 'paraphrases' not in lexunit:
lexunit['paraphrases'] = [] # depends on [control=['if'], data=['lexunit']]
lexunit['paraphrases'].append(paraphrase) # depends on [control=['for'], data=['paraphrase']] # depends on [control=['for'], data=['filename']]
for lexunit in lexunits.values():
germanet_db.lexunits.save(lexunit) # depends on [control=['for'], data=['lexunit']]
print('Inserted {0} wiktionary paraphrases.'.format(num_paraphrases)) |
def delete_tag(context, id, tag_id):
"""delete_tag(context, id, tag_id)
Delete a tag from a job.
>>> dcictl job-delete-tag [OPTIONS]
:param string id: ID of the job to attach the meta to [required]
:param string tag_id: ID of the tag to be removed from the job [required]
"""
result = job.delete_tag(context, id=id, tag_id=tag_id)
if result.status_code == 204:
utils.print_json({'id': id, 'message': 'Tag removed.'})
else:
utils.format_output(result, context.format) | def function[delete_tag, parameter[context, id, tag_id]]:
constant[delete_tag(context, id, tag_id)
Delete a tag from a job.
>>> dcictl job-delete-tag [OPTIONS]
:param string id: ID of the job to attach the meta to [required]
:param string tag_id: ID of the tag to be removed from the job [required]
]
variable[result] assign[=] call[name[job].delete_tag, parameter[name[context]]]
if compare[name[result].status_code equal[==] constant[204]] begin[:]
call[name[utils].print_json, parameter[dictionary[[<ast.Constant object at 0x7da1b23925c0>, <ast.Constant object at 0x7da1b2390a30>], [<ast.Name object at 0x7da1b2391cf0>, <ast.Constant object at 0x7da1b2393340>]]]] | keyword[def] identifier[delete_tag] ( identifier[context] , identifier[id] , identifier[tag_id] ):
literal[string]
identifier[result] = identifier[job] . identifier[delete_tag] ( identifier[context] , identifier[id] = identifier[id] , identifier[tag_id] = identifier[tag_id] )
keyword[if] identifier[result] . identifier[status_code] == literal[int] :
identifier[utils] . identifier[print_json] ({ literal[string] : identifier[id] , literal[string] : literal[string] })
keyword[else] :
identifier[utils] . identifier[format_output] ( identifier[result] , identifier[context] . identifier[format] ) | def delete_tag(context, id, tag_id):
"""delete_tag(context, id, tag_id)
Delete a tag from a job.
>>> dcictl job-delete-tag [OPTIONS]
:param string id: ID of the job to attach the meta to [required]
:param string tag_id: ID of the tag to be removed from the job [required]
"""
result = job.delete_tag(context, id=id, tag_id=tag_id)
if result.status_code == 204:
utils.print_json({'id': id, 'message': 'Tag removed.'}) # depends on [control=['if'], data=[]]
else:
utils.format_output(result, context.format) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.