code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def ensure_directory(path):
"""Ensure that the parent directory of `path` exists"""
dirname = os.path.dirname(path)
py31compat.makedirs(dirname, exist_ok=True) | def function[ensure_directory, parameter[path]]:
constant[Ensure that the parent directory of `path` exists]
variable[dirname] assign[=] call[name[os].path.dirname, parameter[name[path]]]
call[name[py31compat].makedirs, parameter[name[dirname]]] | keyword[def] identifier[ensure_directory] ( identifier[path] ):
literal[string]
identifier[dirname] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[path] )
identifier[py31compat] . identifier[makedirs] ( identifier[dirname] , identifier[exist_ok] = keyword[True] ) | def ensure_directory(path):
"""Ensure that the parent directory of `path` exists"""
dirname = os.path.dirname(path)
py31compat.makedirs(dirname, exist_ok=True) |
def set_doc_data_lics(self, doc, lics):
"""Sets the document data license.
Raises value error if malformed value, CardinalityError
if already defined.
"""
if not self.doc_data_lics_set:
self.doc_data_lics_set = True
if validations.validate_data_lics(lics):
doc.data_license = document.License.from_identifier(lics)
return True
else:
raise SPDXValueError('Document::DataLicense')
else:
raise CardinalityError('Document::DataLicense') | def function[set_doc_data_lics, parameter[self, doc, lics]]:
constant[Sets the document data license.
Raises value error if malformed value, CardinalityError
if already defined.
]
if <ast.UnaryOp object at 0x7da1b0158ac0> begin[:]
name[self].doc_data_lics_set assign[=] constant[True]
if call[name[validations].validate_data_lics, parameter[name[lics]]] begin[:]
name[doc].data_license assign[=] call[name[document].License.from_identifier, parameter[name[lics]]]
return[constant[True]] | keyword[def] identifier[set_doc_data_lics] ( identifier[self] , identifier[doc] , identifier[lics] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[doc_data_lics_set] :
identifier[self] . identifier[doc_data_lics_set] = keyword[True]
keyword[if] identifier[validations] . identifier[validate_data_lics] ( identifier[lics] ):
identifier[doc] . identifier[data_license] = identifier[document] . identifier[License] . identifier[from_identifier] ( identifier[lics] )
keyword[return] keyword[True]
keyword[else] :
keyword[raise] identifier[SPDXValueError] ( literal[string] )
keyword[else] :
keyword[raise] identifier[CardinalityError] ( literal[string] ) | def set_doc_data_lics(self, doc, lics):
"""Sets the document data license.
Raises value error if malformed value, CardinalityError
if already defined.
"""
if not self.doc_data_lics_set:
self.doc_data_lics_set = True
if validations.validate_data_lics(lics):
doc.data_license = document.License.from_identifier(lics)
return True # depends on [control=['if'], data=[]]
else:
raise SPDXValueError('Document::DataLicense') # depends on [control=['if'], data=[]]
else:
raise CardinalityError('Document::DataLicense') |
def _build_keys(self, slug, date=None, granularity='all'):
"""Builds redis keys used to store metrics.
* ``slug`` -- a slug used for a metric, e.g. "user-signups"
* ``date`` -- (optional) A ``datetime.datetime`` object used to
generate the time period for the metric. If omitted, the current date
and time (in UTC) will be used.
* ``granularity`` -- Must be one of: "all" (default), "yearly",
"monthly", "weekly", "daily", "hourly", "minutes", or "seconds".
Returns a list of strings.
"""
slug = slugify(slug) # Ensure slugs have a consistent format
if date is None:
date = datetime.utcnow()
patts = self._build_key_patterns(slug, date)
if granularity == "all":
return list(patts.values())
return [patts[granularity]] | def function[_build_keys, parameter[self, slug, date, granularity]]:
constant[Builds redis keys used to store metrics.
* ``slug`` -- a slug used for a metric, e.g. "user-signups"
* ``date`` -- (optional) A ``datetime.datetime`` object used to
generate the time period for the metric. If omitted, the current date
and time (in UTC) will be used.
* ``granularity`` -- Must be one of: "all" (default), "yearly",
"monthly", "weekly", "daily", "hourly", "minutes", or "seconds".
Returns a list of strings.
]
variable[slug] assign[=] call[name[slugify], parameter[name[slug]]]
if compare[name[date] is constant[None]] begin[:]
variable[date] assign[=] call[name[datetime].utcnow, parameter[]]
variable[patts] assign[=] call[name[self]._build_key_patterns, parameter[name[slug], name[date]]]
if compare[name[granularity] equal[==] constant[all]] begin[:]
return[call[name[list], parameter[call[name[patts].values, parameter[]]]]]
return[list[[<ast.Subscript object at 0x7da1b0fd08b0>]]] | keyword[def] identifier[_build_keys] ( identifier[self] , identifier[slug] , identifier[date] = keyword[None] , identifier[granularity] = literal[string] ):
literal[string]
identifier[slug] = identifier[slugify] ( identifier[slug] )
keyword[if] identifier[date] keyword[is] keyword[None] :
identifier[date] = identifier[datetime] . identifier[utcnow] ()
identifier[patts] = identifier[self] . identifier[_build_key_patterns] ( identifier[slug] , identifier[date] )
keyword[if] identifier[granularity] == literal[string] :
keyword[return] identifier[list] ( identifier[patts] . identifier[values] ())
keyword[return] [ identifier[patts] [ identifier[granularity] ]] | def _build_keys(self, slug, date=None, granularity='all'):
"""Builds redis keys used to store metrics.
* ``slug`` -- a slug used for a metric, e.g. "user-signups"
* ``date`` -- (optional) A ``datetime.datetime`` object used to
generate the time period for the metric. If omitted, the current date
and time (in UTC) will be used.
* ``granularity`` -- Must be one of: "all" (default), "yearly",
"monthly", "weekly", "daily", "hourly", "minutes", or "seconds".
Returns a list of strings.
"""
slug = slugify(slug) # Ensure slugs have a consistent format
if date is None:
date = datetime.utcnow() # depends on [control=['if'], data=['date']]
patts = self._build_key_patterns(slug, date)
if granularity == 'all':
return list(patts.values()) # depends on [control=['if'], data=[]]
return [patts[granularity]] |
def p_Statement(p):
'''
Statement : StatementWithoutTerminator Terminator
'''
if len(p) < 3:
term = Terminator('')
else:
term = p[2]
p[0] = Statement(p[1], term) | def function[p_Statement, parameter[p]]:
constant[
Statement : StatementWithoutTerminator Terminator
]
if compare[call[name[len], parameter[name[p]]] less[<] constant[3]] begin[:]
variable[term] assign[=] call[name[Terminator], parameter[constant[]]]
call[name[p]][constant[0]] assign[=] call[name[Statement], parameter[call[name[p]][constant[1]], name[term]]] | keyword[def] identifier[p_Statement] ( identifier[p] ):
literal[string]
keyword[if] identifier[len] ( identifier[p] )< literal[int] :
identifier[term] = identifier[Terminator] ( literal[string] )
keyword[else] :
identifier[term] = identifier[p] [ literal[int] ]
identifier[p] [ literal[int] ]= identifier[Statement] ( identifier[p] [ literal[int] ], identifier[term] ) | def p_Statement(p):
"""
Statement : StatementWithoutTerminator Terminator
"""
if len(p) < 3:
term = Terminator('') # depends on [control=['if'], data=[]]
else:
term = p[2]
p[0] = Statement(p[1], term) |
def where(self, within_of=None, inplace=False, **kwargs):
"""Return indices that met the conditions"""
masks = super(System, self).where(inplace=inplace, **kwargs)
def index_to_mask(index, n):
val = np.zeros(n, dtype='bool')
val[index] = True
return val
def masks_and(dict1, dict2):
return {k: dict1[k] & index_to_mask(dict2[k], len(dict1[k])) for k in dict1 }
if within_of is not None:
if self.box_vectors is None:
raise Exception('Only periodic distance supported')
thr, ref = within_of
if isinstance(ref, int):
a = self.r_array[ref][np.newaxis, np.newaxis, :] # (1, 1, 3,)
elif len(ref) == 1:
a = self.r_array[ref][np.newaxis, :] # (1, 1, 3)
else:
a = self.r_array[ref][:, np.newaxis, :] # (2, 1, 3)
b = self.r_array[np.newaxis, :, :]
dist = periodic_distance(a, b,
periodic=self.box_vectors.diagonal())
atoms = (dist <= thr).sum(axis=0, dtype='bool')
m = self._propagate_dim(atoms, 'atom')
masks = masks_and(masks, m)
return masks | def function[where, parameter[self, within_of, inplace]]:
constant[Return indices that met the conditions]
variable[masks] assign[=] call[call[name[super], parameter[name[System], name[self]]].where, parameter[]]
def function[index_to_mask, parameter[index, n]]:
variable[val] assign[=] call[name[np].zeros, parameter[name[n]]]
call[name[val]][name[index]] assign[=] constant[True]
return[name[val]]
def function[masks_and, parameter[dict1, dict2]]:
return[<ast.DictComp object at 0x7da18dc997e0>]
if compare[name[within_of] is_not constant[None]] begin[:]
if compare[name[self].box_vectors is constant[None]] begin[:]
<ast.Raise object at 0x7da18dc9b5b0>
<ast.Tuple object at 0x7da18dc9b370> assign[=] name[within_of]
if call[name[isinstance], parameter[name[ref], name[int]]] begin[:]
variable[a] assign[=] call[call[name[self].r_array][name[ref]]][tuple[[<ast.Attribute object at 0x7da18dc9ad70>, <ast.Attribute object at 0x7da18dc9ba30>, <ast.Slice object at 0x7da20c6e6080>]]]
variable[b] assign[=] call[name[self].r_array][tuple[[<ast.Attribute object at 0x7da20c6e6110>, <ast.Slice object at 0x7da20c6e5000>, <ast.Slice object at 0x7da20c6e7f70>]]]
variable[dist] assign[=] call[name[periodic_distance], parameter[name[a], name[b]]]
variable[atoms] assign[=] call[compare[name[dist] less_or_equal[<=] name[thr]].sum, parameter[]]
variable[m] assign[=] call[name[self]._propagate_dim, parameter[name[atoms], constant[atom]]]
variable[masks] assign[=] call[name[masks_and], parameter[name[masks], name[m]]]
return[name[masks]] | keyword[def] identifier[where] ( identifier[self] , identifier[within_of] = keyword[None] , identifier[inplace] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[masks] = identifier[super] ( identifier[System] , identifier[self] ). identifier[where] ( identifier[inplace] = identifier[inplace] ,** identifier[kwargs] )
keyword[def] identifier[index_to_mask] ( identifier[index] , identifier[n] ):
identifier[val] = identifier[np] . identifier[zeros] ( identifier[n] , identifier[dtype] = literal[string] )
identifier[val] [ identifier[index] ]= keyword[True]
keyword[return] identifier[val]
keyword[def] identifier[masks_and] ( identifier[dict1] , identifier[dict2] ):
keyword[return] { identifier[k] : identifier[dict1] [ identifier[k] ]& identifier[index_to_mask] ( identifier[dict2] [ identifier[k] ], identifier[len] ( identifier[dict1] [ identifier[k] ])) keyword[for] identifier[k] keyword[in] identifier[dict1] }
keyword[if] identifier[within_of] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[box_vectors] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[thr] , identifier[ref] = identifier[within_of]
keyword[if] identifier[isinstance] ( identifier[ref] , identifier[int] ):
identifier[a] = identifier[self] . identifier[r_array] [ identifier[ref] ][ identifier[np] . identifier[newaxis] , identifier[np] . identifier[newaxis] ,:]
keyword[elif] identifier[len] ( identifier[ref] )== literal[int] :
identifier[a] = identifier[self] . identifier[r_array] [ identifier[ref] ][ identifier[np] . identifier[newaxis] ,:]
keyword[else] :
identifier[a] = identifier[self] . identifier[r_array] [ identifier[ref] ][:, identifier[np] . identifier[newaxis] ,:]
identifier[b] = identifier[self] . identifier[r_array] [ identifier[np] . identifier[newaxis] ,:,:]
identifier[dist] = identifier[periodic_distance] ( identifier[a] , identifier[b] ,
identifier[periodic] = identifier[self] . identifier[box_vectors] . identifier[diagonal] ())
identifier[atoms] =( identifier[dist] <= identifier[thr] ). identifier[sum] ( identifier[axis] = literal[int] , identifier[dtype] = literal[string] )
identifier[m] = identifier[self] . identifier[_propagate_dim] ( identifier[atoms] , literal[string] )
identifier[masks] = identifier[masks_and] ( identifier[masks] , identifier[m] )
keyword[return] identifier[masks] | def where(self, within_of=None, inplace=False, **kwargs):
"""Return indices that met the conditions"""
masks = super(System, self).where(inplace=inplace, **kwargs)
def index_to_mask(index, n):
val = np.zeros(n, dtype='bool')
val[index] = True
return val
def masks_and(dict1, dict2):
return {k: dict1[k] & index_to_mask(dict2[k], len(dict1[k])) for k in dict1}
if within_of is not None:
if self.box_vectors is None:
raise Exception('Only periodic distance supported') # depends on [control=['if'], data=[]]
(thr, ref) = within_of
if isinstance(ref, int):
a = self.r_array[ref][np.newaxis, np.newaxis, :] # (1, 1, 3,) # depends on [control=['if'], data=[]]
elif len(ref) == 1:
a = self.r_array[ref][np.newaxis, :] # (1, 1, 3) # depends on [control=['if'], data=[]]
else:
a = self.r_array[ref][:, np.newaxis, :] # (2, 1, 3)
b = self.r_array[np.newaxis, :, :]
dist = periodic_distance(a, b, periodic=self.box_vectors.diagonal())
atoms = (dist <= thr).sum(axis=0, dtype='bool')
m = self._propagate_dim(atoms, 'atom')
masks = masks_and(masks, m) # depends on [control=['if'], data=['within_of']]
return masks |
def access_token(self, cookie):
"""Make and store access token as proxy for the access cookie.
Create an access token to act as a proxy for access cookie, add it to
the dict of accepted access tokens with (cookie, current timestamp)
as the value. Return the access token. Return None if cookie is not set.
"""
if (cookie):
token = self._generate_random_string(self.access_tokens)
self.access_tokens[token] = (cookie, int(time.time()))
return token
else:
return None | def function[access_token, parameter[self, cookie]]:
constant[Make and store access token as proxy for the access cookie.
Create an access token to act as a proxy for access cookie, add it to
the dict of accepted access tokens with (cookie, current timestamp)
as the value. Return the access token. Return None if cookie is not set.
]
if name[cookie] begin[:]
variable[token] assign[=] call[name[self]._generate_random_string, parameter[name[self].access_tokens]]
call[name[self].access_tokens][name[token]] assign[=] tuple[[<ast.Name object at 0x7da18bc72920>, <ast.Call object at 0x7da18bc71630>]]
return[name[token]] | keyword[def] identifier[access_token] ( identifier[self] , identifier[cookie] ):
literal[string]
keyword[if] ( identifier[cookie] ):
identifier[token] = identifier[self] . identifier[_generate_random_string] ( identifier[self] . identifier[access_tokens] )
identifier[self] . identifier[access_tokens] [ identifier[token] ]=( identifier[cookie] , identifier[int] ( identifier[time] . identifier[time] ()))
keyword[return] identifier[token]
keyword[else] :
keyword[return] keyword[None] | def access_token(self, cookie):
"""Make and store access token as proxy for the access cookie.
Create an access token to act as a proxy for access cookie, add it to
the dict of accepted access tokens with (cookie, current timestamp)
as the value. Return the access token. Return None if cookie is not set.
"""
if cookie:
token = self._generate_random_string(self.access_tokens)
self.access_tokens[token] = (cookie, int(time.time()))
return token # depends on [control=['if'], data=[]]
else:
return None |
def corr_dw_v1(self):
"""Adjust the water stage drop to the highest value allowed and correct
the associated fluxes.
Note that method |corr_dw_v1| calls the method `interp_v` of the
respective application model. Hence the requirements of the actual
`interp_v` need to be considered additionally.
Required control parameter:
|MaxDW|
Required derived parameters:
|llake_derived.TOY|
|Seconds|
Required flux sequence:
|QZ|
Updated flux sequence:
|llake_fluxes.QA|
Updated state sequences:
|llake_states.W|
|llake_states.V|
Basic Restriction:
:math:`W_{old} - W_{new} \\leq MaxDW`
Examples:
In preparation for the following examples, define a short simulation
time period with a simulation step size of 12 hours and initialize
the required model object:
>>> from hydpy import pub
>>> pub.timegrids = '2000.01.01', '2000.01.04', '12h'
>>> from hydpy.models.llake import *
>>> parameterstep('1d')
>>> derived.toy.update()
>>> derived.seconds.update()
Select the first half of the second day of January as the simulation
step relevant for the following examples:
>>> model.idx_sim = pub.timegrids.init['2000.01.02']
The following tests are based on method |interp_v_v1| for the
interpolation of the stored water volume based on the corrected
water stage:
>>> model.interp_v = model.interp_v_v1
For the sake of simplicity, the underlying `w`-`v` relationship is
assumed to be linear:
>>> n(2.)
>>> w(0., 1.)
>>> v(0., 1e6)
The maximum drop in water stage for the first half of the second
day of January is set to 0.4 m/d. Note that, due to the difference
between the parameter step size and the simulation step size, the
actual value used for calculation is 0.2 m/12h:
>>> maxdw(_1_1_18=.1,
... _1_2_6=.4,
... _1_2_18=.1)
>>> maxdw
maxdw(toy_1_1_18_0_0=0.1,
toy_1_2_6_0_0=0.4,
toy_1_2_18_0_0=0.1)
>>> from hydpy import round_
>>> round_(maxdw.value[2])
0.2
Define old and new water stages and volumes in agreement with the
given linear relationship:
>>> states.w.old = 1.
>>> states.v.old = 1e6
>>> states.w.new = .9
>>> states.v.new = 9e5
Also define an inflow and an outflow value. Note the that the latter
is set to zero, which is inconsistent with the actual water stage drop
defined above, but done for didactic reasons:
>>> fluxes.qz = 1.
>>> fluxes.qa = 0.
Calling the |corr_dw_v1| method does not change the values of
either of following sequences, as the actual drop (0.1 m/12h) is
smaller than the allowed drop (0.2 m/12h):
>>> model.corr_dw_v1()
>>> states.w
w(0.9)
>>> states.v
v(900000.0)
>>> fluxes.qa
qa(0.0)
Note that the values given above are not recalculated, which can
clearly be seen for the lake outflow, which is still zero.
Through setting the new value of the water stage to 0.6 m, the actual
drop (0.4 m/12h) exceeds the allowed drop (0.2 m/12h). Hence the
water stage is trimmed and the other values are recalculated:
>>> states.w.new = .6
>>> model.corr_dw_v1()
>>> states.w
w(0.8)
>>> states.v
v(800000.0)
>>> fluxes.qa
qa(5.62963)
Through setting the maximum water stage drop to zero, method
|corr_dw_v1| is effectively disabled. Regardless of the actual
change in water stage, no trimming or recalculating is performed:
>>> maxdw.toy_01_02_06 = 0.
>>> states.w.new = .6
>>> model.corr_dw_v1()
>>> states.w
w(0.6)
>>> states.v
v(800000.0)
>>> fluxes.qa
qa(5.62963)
"""
con = self.parameters.control.fastaccess
der = self.parameters.derived.fastaccess
flu = self.sequences.fluxes.fastaccess
old = self.sequences.states.fastaccess_old
new = self.sequences.states.fastaccess_new
idx = der.toy[self.idx_sim]
if (con.maxdw[idx] > 0.) and ((old.w-new.w) > con.maxdw[idx]):
new.w = old.w-con.maxdw[idx]
self.interp_v()
flu.qa = flu.qz+(old.v-new.v)/der.seconds | def function[corr_dw_v1, parameter[self]]:
constant[Adjust the water stage drop to the highest value allowed and correct
the associated fluxes.
Note that method |corr_dw_v1| calls the method `interp_v` of the
respective application model. Hence the requirements of the actual
`interp_v` need to be considered additionally.
Required control parameter:
|MaxDW|
Required derived parameters:
|llake_derived.TOY|
|Seconds|
Required flux sequence:
|QZ|
Updated flux sequence:
|llake_fluxes.QA|
Updated state sequences:
|llake_states.W|
|llake_states.V|
Basic Restriction:
:math:`W_{old} - W_{new} \leq MaxDW`
Examples:
In preparation for the following examples, define a short simulation
time period with a simulation step size of 12 hours and initialize
the required model object:
>>> from hydpy import pub
>>> pub.timegrids = '2000.01.01', '2000.01.04', '12h'
>>> from hydpy.models.llake import *
>>> parameterstep('1d')
>>> derived.toy.update()
>>> derived.seconds.update()
Select the first half of the second day of January as the simulation
step relevant for the following examples:
>>> model.idx_sim = pub.timegrids.init['2000.01.02']
The following tests are based on method |interp_v_v1| for the
interpolation of the stored water volume based on the corrected
water stage:
>>> model.interp_v = model.interp_v_v1
For the sake of simplicity, the underlying `w`-`v` relationship is
assumed to be linear:
>>> n(2.)
>>> w(0., 1.)
>>> v(0., 1e6)
The maximum drop in water stage for the first half of the second
day of January is set to 0.4 m/d. Note that, due to the difference
between the parameter step size and the simulation step size, the
actual value used for calculation is 0.2 m/12h:
>>> maxdw(_1_1_18=.1,
... _1_2_6=.4,
... _1_2_18=.1)
>>> maxdw
maxdw(toy_1_1_18_0_0=0.1,
toy_1_2_6_0_0=0.4,
toy_1_2_18_0_0=0.1)
>>> from hydpy import round_
>>> round_(maxdw.value[2])
0.2
Define old and new water stages and volumes in agreement with the
given linear relationship:
>>> states.w.old = 1.
>>> states.v.old = 1e6
>>> states.w.new = .9
>>> states.v.new = 9e5
Also define an inflow and an outflow value. Note the that the latter
is set to zero, which is inconsistent with the actual water stage drop
defined above, but done for didactic reasons:
>>> fluxes.qz = 1.
>>> fluxes.qa = 0.
Calling the |corr_dw_v1| method does not change the values of
either of following sequences, as the actual drop (0.1 m/12h) is
smaller than the allowed drop (0.2 m/12h):
>>> model.corr_dw_v1()
>>> states.w
w(0.9)
>>> states.v
v(900000.0)
>>> fluxes.qa
qa(0.0)
Note that the values given above are not recalculated, which can
clearly be seen for the lake outflow, which is still zero.
Through setting the new value of the water stage to 0.6 m, the actual
drop (0.4 m/12h) exceeds the allowed drop (0.2 m/12h). Hence the
water stage is trimmed and the other values are recalculated:
>>> states.w.new = .6
>>> model.corr_dw_v1()
>>> states.w
w(0.8)
>>> states.v
v(800000.0)
>>> fluxes.qa
qa(5.62963)
Through setting the maximum water stage drop to zero, method
|corr_dw_v1| is effectively disabled. Regardless of the actual
change in water stage, no trimming or recalculating is performed:
>>> maxdw.toy_01_02_06 = 0.
>>> states.w.new = .6
>>> model.corr_dw_v1()
>>> states.w
w(0.6)
>>> states.v
v(800000.0)
>>> fluxes.qa
qa(5.62963)
]
variable[con] assign[=] name[self].parameters.control.fastaccess
variable[der] assign[=] name[self].parameters.derived.fastaccess
variable[flu] assign[=] name[self].sequences.fluxes.fastaccess
variable[old] assign[=] name[self].sequences.states.fastaccess_old
variable[new] assign[=] name[self].sequences.states.fastaccess_new
variable[idx] assign[=] call[name[der].toy][name[self].idx_sim]
if <ast.BoolOp object at 0x7da2044c1960> begin[:]
name[new].w assign[=] binary_operation[name[old].w - call[name[con].maxdw][name[idx]]]
call[name[self].interp_v, parameter[]]
name[flu].qa assign[=] binary_operation[name[flu].qz + binary_operation[binary_operation[name[old].v - name[new].v] / name[der].seconds]] | keyword[def] identifier[corr_dw_v1] ( identifier[self] ):
literal[string]
identifier[con] = identifier[self] . identifier[parameters] . identifier[control] . identifier[fastaccess]
identifier[der] = identifier[self] . identifier[parameters] . identifier[derived] . identifier[fastaccess]
identifier[flu] = identifier[self] . identifier[sequences] . identifier[fluxes] . identifier[fastaccess]
identifier[old] = identifier[self] . identifier[sequences] . identifier[states] . identifier[fastaccess_old]
identifier[new] = identifier[self] . identifier[sequences] . identifier[states] . identifier[fastaccess_new]
identifier[idx] = identifier[der] . identifier[toy] [ identifier[self] . identifier[idx_sim] ]
keyword[if] ( identifier[con] . identifier[maxdw] [ identifier[idx] ]> literal[int] ) keyword[and] (( identifier[old] . identifier[w] - identifier[new] . identifier[w] )> identifier[con] . identifier[maxdw] [ identifier[idx] ]):
identifier[new] . identifier[w] = identifier[old] . identifier[w] - identifier[con] . identifier[maxdw] [ identifier[idx] ]
identifier[self] . identifier[interp_v] ()
identifier[flu] . identifier[qa] = identifier[flu] . identifier[qz] +( identifier[old] . identifier[v] - identifier[new] . identifier[v] )/ identifier[der] . identifier[seconds] | def corr_dw_v1(self):
"""Adjust the water stage drop to the highest value allowed and correct
the associated fluxes.
Note that method |corr_dw_v1| calls the method `interp_v` of the
respective application model. Hence the requirements of the actual
`interp_v` need to be considered additionally.
Required control parameter:
|MaxDW|
Required derived parameters:
|llake_derived.TOY|
|Seconds|
Required flux sequence:
|QZ|
Updated flux sequence:
|llake_fluxes.QA|
Updated state sequences:
|llake_states.W|
|llake_states.V|
Basic Restriction:
:math:`W_{old} - W_{new} \\leq MaxDW`
Examples:
In preparation for the following examples, define a short simulation
time period with a simulation step size of 12 hours and initialize
the required model object:
>>> from hydpy import pub
>>> pub.timegrids = '2000.01.01', '2000.01.04', '12h'
>>> from hydpy.models.llake import *
>>> parameterstep('1d')
>>> derived.toy.update()
>>> derived.seconds.update()
Select the first half of the second day of January as the simulation
step relevant for the following examples:
>>> model.idx_sim = pub.timegrids.init['2000.01.02']
The following tests are based on method |interp_v_v1| for the
interpolation of the stored water volume based on the corrected
water stage:
>>> model.interp_v = model.interp_v_v1
For the sake of simplicity, the underlying `w`-`v` relationship is
assumed to be linear:
>>> n(2.)
>>> w(0., 1.)
>>> v(0., 1e6)
The maximum drop in water stage for the first half of the second
day of January is set to 0.4 m/d. Note that, due to the difference
between the parameter step size and the simulation step size, the
actual value used for calculation is 0.2 m/12h:
>>> maxdw(_1_1_18=.1,
... _1_2_6=.4,
... _1_2_18=.1)
>>> maxdw
maxdw(toy_1_1_18_0_0=0.1,
toy_1_2_6_0_0=0.4,
toy_1_2_18_0_0=0.1)
>>> from hydpy import round_
>>> round_(maxdw.value[2])
0.2
Define old and new water stages and volumes in agreement with the
given linear relationship:
>>> states.w.old = 1.
>>> states.v.old = 1e6
>>> states.w.new = .9
>>> states.v.new = 9e5
Also define an inflow and an outflow value. Note the that the latter
is set to zero, which is inconsistent with the actual water stage drop
defined above, but done for didactic reasons:
>>> fluxes.qz = 1.
>>> fluxes.qa = 0.
Calling the |corr_dw_v1| method does not change the values of
either of following sequences, as the actual drop (0.1 m/12h) is
smaller than the allowed drop (0.2 m/12h):
>>> model.corr_dw_v1()
>>> states.w
w(0.9)
>>> states.v
v(900000.0)
>>> fluxes.qa
qa(0.0)
Note that the values given above are not recalculated, which can
clearly be seen for the lake outflow, which is still zero.
Through setting the new value of the water stage to 0.6 m, the actual
drop (0.4 m/12h) exceeds the allowed drop (0.2 m/12h). Hence the
water stage is trimmed and the other values are recalculated:
>>> states.w.new = .6
>>> model.corr_dw_v1()
>>> states.w
w(0.8)
>>> states.v
v(800000.0)
>>> fluxes.qa
qa(5.62963)
Through setting the maximum water stage drop to zero, method
|corr_dw_v1| is effectively disabled. Regardless of the actual
change in water stage, no trimming or recalculating is performed:
>>> maxdw.toy_01_02_06 = 0.
>>> states.w.new = .6
>>> model.corr_dw_v1()
>>> states.w
w(0.6)
>>> states.v
v(800000.0)
>>> fluxes.qa
qa(5.62963)
"""
con = self.parameters.control.fastaccess
der = self.parameters.derived.fastaccess
flu = self.sequences.fluxes.fastaccess
old = self.sequences.states.fastaccess_old
new = self.sequences.states.fastaccess_new
idx = der.toy[self.idx_sim]
if con.maxdw[idx] > 0.0 and old.w - new.w > con.maxdw[idx]:
new.w = old.w - con.maxdw[idx]
self.interp_v()
flu.qa = flu.qz + (old.v - new.v) / der.seconds # depends on [control=['if'], data=[]] |
def value_get(method_name):
"""
Creates a getter that will call value's method with specified name
using the context's key as first argument.
@param method_name: the name of a method belonging to the value.
@type method_name: str
"""
def value_get(value, context, **_params):
method = getattr(value, method_name)
return _get(method, context["key"], (), {})
return value_get | def function[value_get, parameter[method_name]]:
constant[
Creates a getter that will call value's method with specified name
using the context's key as first argument.
@param method_name: the name of a method belonging to the value.
@type method_name: str
]
def function[value_get, parameter[value, context]]:
variable[method] assign[=] call[name[getattr], parameter[name[value], name[method_name]]]
return[call[name[_get], parameter[name[method], call[name[context]][constant[key]], tuple[[]], dictionary[[], []]]]]
return[name[value_get]] | keyword[def] identifier[value_get] ( identifier[method_name] ):
literal[string]
keyword[def] identifier[value_get] ( identifier[value] , identifier[context] ,** identifier[_params] ):
identifier[method] = identifier[getattr] ( identifier[value] , identifier[method_name] )
keyword[return] identifier[_get] ( identifier[method] , identifier[context] [ literal[string] ],(),{})
keyword[return] identifier[value_get] | def value_get(method_name):
"""
Creates a getter that will call value's method with specified name
using the context's key as first argument.
@param method_name: the name of a method belonging to the value.
@type method_name: str
"""
def value_get(value, context, **_params):
method = getattr(value, method_name)
return _get(method, context['key'], (), {})
return value_get |
def expunge(self, instance=None):
'''Remove ``instance`` from this :class:`Session`. If ``instance``
is not given, it removes all instances from this :class:`Session`.'''
if instance is not None:
sm = self._models.get(instance._meta)
if sm:
return sm.expunge(instance)
else:
self._models.clear() | def function[expunge, parameter[self, instance]]:
constant[Remove ``instance`` from this :class:`Session`. If ``instance``
is not given, it removes all instances from this :class:`Session`.]
if compare[name[instance] is_not constant[None]] begin[:]
variable[sm] assign[=] call[name[self]._models.get, parameter[name[instance]._meta]]
if name[sm] begin[:]
return[call[name[sm].expunge, parameter[name[instance]]]] | keyword[def] identifier[expunge] ( identifier[self] , identifier[instance] = keyword[None] ):
literal[string]
keyword[if] identifier[instance] keyword[is] keyword[not] keyword[None] :
identifier[sm] = identifier[self] . identifier[_models] . identifier[get] ( identifier[instance] . identifier[_meta] )
keyword[if] identifier[sm] :
keyword[return] identifier[sm] . identifier[expunge] ( identifier[instance] )
keyword[else] :
identifier[self] . identifier[_models] . identifier[clear] () | def expunge(self, instance=None):
"""Remove ``instance`` from this :class:`Session`. If ``instance``
is not given, it removes all instances from this :class:`Session`."""
if instance is not None:
sm = self._models.get(instance._meta)
if sm:
return sm.expunge(instance) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['instance']]
else:
self._models.clear() |
def assign_to(self, obj):
"""Assign `x` and `y` to an object that has properties `x` and `y`."""
obj.x = self.x
obj.y = self.y | def function[assign_to, parameter[self, obj]]:
constant[Assign `x` and `y` to an object that has properties `x` and `y`.]
name[obj].x assign[=] name[self].x
name[obj].y assign[=] name[self].y | keyword[def] identifier[assign_to] ( identifier[self] , identifier[obj] ):
literal[string]
identifier[obj] . identifier[x] = identifier[self] . identifier[x]
identifier[obj] . identifier[y] = identifier[self] . identifier[y] | def assign_to(self, obj):
"""Assign `x` and `y` to an object that has properties `x` and `y`."""
obj.x = self.x
obj.y = self.y |
def _evictStaleDevices(self):
    """
    A housekeeping function which runs in a worker thread and which evicts
    devices that haven't sent an update for a while.

    Polls ``self.devices`` once per second while ``self.running`` is true,
    removing every entry whose value reports ``hasExpired()``, and logs a
    final message once the loop shuts down.
    """
    while self.running:
        # Snapshot the expired ids first so the dict is never mutated
        # while being iterated.
        expiredDeviceIds = [key for key, value in self.devices.items() if value.hasExpired()]
        for key in expiredDeviceIds:
            # Lazy %-style logging args: the message is only formatted
            # when the record is actually emitted.
            logger.warning("Device timeout, removing %s", key)
            del self.devices[key]
        time.sleep(1)
    # TODO send reset after a device fails
    logger.warning("DeviceCaretaker is now shutdown")
constant[
A housekeeping function which runs in a worker thread and which evicts devices that haven't sent an update for a
while.
]
while name[self].running begin[:]
variable[expiredDeviceIds] assign[=] <ast.ListComp object at 0x7da1b0e815d0>
for taget[name[key]] in starred[name[expiredDeviceIds]] begin[:]
call[name[logger].warning, parameter[binary_operation[constant[Device timeout, removing ] + name[key]]]]
<ast.Delete object at 0x7da1b0e802e0>
call[name[time].sleep, parameter[constant[1]]]
call[name[logger].warning, parameter[constant[DeviceCaretaker is now shutdown]]] | keyword[def] identifier[_evictStaleDevices] ( identifier[self] ):
literal[string]
keyword[while] identifier[self] . identifier[running] :
identifier[expiredDeviceIds] =[ identifier[key] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[self] . identifier[devices] . identifier[items] () keyword[if] identifier[value] . identifier[hasExpired] ()]
keyword[for] identifier[key] keyword[in] identifier[expiredDeviceIds] :
identifier[logger] . identifier[warning] ( literal[string] + identifier[key] )
keyword[del] identifier[self] . identifier[devices] [ identifier[key] ]
identifier[time] . identifier[sleep] ( literal[int] )
identifier[logger] . identifier[warning] ( literal[string] ) | def _evictStaleDevices(self):
"""
A housekeeping function which runs in a worker thread and which evicts devices that haven't sent an update for a
while.
"""
while self.running:
expiredDeviceIds = [key for (key, value) in self.devices.items() if value.hasExpired()]
for key in expiredDeviceIds:
logger.warning('Device timeout, removing ' + key)
del self.devices[key] # depends on [control=['for'], data=['key']]
time.sleep(1) # depends on [control=['while'], data=[]]
# TODO send reset after a device fails
logger.warning('DeviceCaretaker is now shutdown') |
def normalized_table_calc(classes, table):
    """
    Return the row-normalized form of a confusion matrix.

    Each cell is divided by its row total (a zero total is replaced by 1
    to avoid division by zero) and rounded to 5 decimal places.

    :param classes: classes list
    :type classes: list
    :param table: confusion matrix as a nested dict
    :type table: dict
    :return: normalized table as dict
    """
    normalized = {}
    for actual in classes:
        row_total = sum(table[actual].values()) or 1
        normalized[actual] = {
            predicted: numpy.around(table[actual][predicted] / row_total, 5)
            for predicted in classes
        }
    return normalized
constant[
Return normalized confusion matrix.
:param classes: classes list
:type classes:list
:param table: table
:type table:dict
:return: normalized table as dict
]
variable[map_dict] assign[=] <ast.DictComp object at 0x7da1b1620340>
variable[new_table] assign[=] <ast.DictComp object at 0x7da1b1621450>
for taget[name[key]] in starred[name[classes]] begin[:]
variable[div] assign[=] call[name[sum], parameter[call[call[name[table]][name[key]].values, parameter[]]]]
if compare[name[div] equal[==] constant[0]] begin[:]
variable[div] assign[=] constant[1]
for taget[name[item]] in starred[name[classes]] begin[:]
call[call[name[new_table]][name[key]]][name[item]] assign[=] call[name[numpy].around, parameter[binary_operation[call[call[name[table]][name[key]]][name[item]] / name[div]], constant[5]]]
return[name[new_table]] | keyword[def] identifier[normalized_table_calc] ( identifier[classes] , identifier[table] ):
literal[string]
identifier[map_dict] ={ identifier[k] : literal[int] keyword[for] identifier[k] keyword[in] identifier[classes] }
identifier[new_table] ={ identifier[k] : identifier[map_dict] . identifier[copy] () keyword[for] identifier[k] keyword[in] identifier[classes] }
keyword[for] identifier[key] keyword[in] identifier[classes] :
identifier[div] = identifier[sum] ( identifier[table] [ identifier[key] ]. identifier[values] ())
keyword[if] identifier[div] == literal[int] :
identifier[div] = literal[int]
keyword[for] identifier[item] keyword[in] identifier[classes] :
identifier[new_table] [ identifier[key] ][ identifier[item] ]= identifier[numpy] . identifier[around] ( identifier[table] [ identifier[key] ][ identifier[item] ]/ identifier[div] , literal[int] )
keyword[return] identifier[new_table] | def normalized_table_calc(classes, table):
"""
Return normalized confusion matrix.
:param classes: classes list
:type classes:list
:param table: table
:type table:dict
:return: normalized table as dict
"""
map_dict = {k: 0 for k in classes}
new_table = {k: map_dict.copy() for k in classes}
for key in classes:
div = sum(table[key].values())
if div == 0:
div = 1 # depends on [control=['if'], data=['div']]
for item in classes:
new_table[key][item] = numpy.around(table[key][item] / div, 5) # depends on [control=['for'], data=['item']] # depends on [control=['for'], data=['key']]
return new_table |
def handle_fields(definitions, fields, prefix, offset, multiplier):
    """
    Helper for handling naming and sizing of fields. It's terrible.

    Walks ``fields`` (and, via recursion, any compound field types looked
    up in ``definitions``) and flattens them into FieldItem entries with
    dotted names and byte offsets.

    :param definitions: all known type definitions, searched by identifier
        when a field refers to a non-primitive type.
    :param fields: the fields of the definition currently being expanded.
    :param prefix: dotted name of the enclosing field ('' / falsy at top level).
    :param multiplier: per-element byte size of the enclosing
        variable-length group, or a falsy value when not inside one; a
        truthy multiplier makes scalar offsets render as "<mult>N+<off>".
    :return: tuple ``(items, offset, multiplier)`` -- the accumulated
        FieldItem list, the offset just past the fields walked so far, and
        the possibly-updated multiplier.
    """
    items = []
    for f in fields:
        # Case 1: array whose element type is a primitive construct code.
        if f.type_id == "array" and f.options['fill'].value in CONSTRUCT_CODE:
            prefix_name = '.'.join([prefix, f.identifier]) if prefix else f.identifier
            n_with_values = f.options['n_with_values'].value
            bitfields = f.options['fields'].value if n_with_values > 0 else None
            if 'size' in f.options:
                # Fixed-size array: reserve element-size * count bytes.
                name = "%s[%s]" % (f.options['fill'].value, str(f.options['size'].value))
                size = field_sizes[f.options['fill'].value] * f.options['size'].value
                item = FieldItem(prefix_name, name, offset, size,
                                 str(f.units), f.desc, n_with_values, bitfields)
                items.append(item)
                offset += size
            else:
                # Variable-length array ("N" elements): record one
                # element's size as the multiplier, report size "N", and
                # advance the offset by a single element.
                name = "%s[%s]" % (f.options['fill'].value, "N")
                multiplier = field_sizes[f.options['fill'].value]
                size = field_sizes[f.options['fill'].value] * 1
                item = FieldItem(prefix_name, name, offset, "N",
                                 str(f.units), f.desc, n_with_values, bitfields)
                items.append(item)
                offset += size
        # Case 2: string field -- treated as an array of u8 bytes.
        elif f.type_id == "string":
            prefix_name = '.'.join([prefix, f.identifier]) if prefix else f.identifier
            n_with_values = f.options['n_with_values'].value
            bitfields = f.options['fields'].value if n_with_values > 0 else None
            if 'size' in f.options:
                # Fixed-size string.
                name = "string"
                size = field_sizes['u8'] * f.options['size'].value
                item = FieldItem(prefix_name, name, offset, size,
                                 str(f.units), f.desc, n_with_values, bitfields)
                items.append(item)
                offset += size
            else:
                # Variable-length string: one byte per element, size "N".
                name = "string"
                size = field_sizes['u8']
                multiplier = 1
                item = FieldItem(prefix_name, name, offset, "N",
                                 str(f.units), f.desc, n_with_values, bitfields)
                items.append(item)
                offset += size
        # Case 3: array of a compound (user-defined) type. The first
        # recursive pass is only used to measure one element's size
        # (new_offset - offset); its items are discarded. The second pass,
        # run with that size as the multiplier, produces the items kept.
        elif f.type_id == "array":
            name = f.options['fill'].value
            definition = next(d for d in definitions if name == d.identifier)
            prefix_name = '.'.join([prefix, f.identifier]) if prefix else f.identifier
            (new_items, new_offset, new_multiplier) \
                = handle_fields(definitions,
                                definition.fields,
                                prefix_name + "[N]",
                                offset,
                                multiplier)
            multiplier = new_offset - offset
            (newer_items, newer_offset, newer_multiplier) \
                = handle_fields(definitions,
                                definition.fields,
                                prefix_name + "[N]", offset,
                                multiplier)
            items += newer_items
            offset = newer_offset
        # Case 4: scalar field of a user-defined type: inline (flatten)
        # its fields under this field's dotted prefix.
        elif f.type_id not in CONSTRUCT_CODE:
            name = f.type_id
            definition = next(d for d in definitions if name == d.identifier)
            prefix_name = '.'.join([prefix, f.identifier]) if prefix else f.identifier
            (new_items, new_offset, new_multiplier) \
                = handle_fields(definitions,
                                definition.fields,
                                prefix_name,
                                offset,
                                multiplier)
            items += new_items
            offset = new_offset
            multiplier = new_multiplier
        # Case 5: primitive scalar. Inside a repeated group the offset is
        # symbolic ("<mult>N+<off>"); otherwise a plain integer.
        else:
            size = field_sizes[f.type_id]
            name = f.type_id
            adj_offset = "%dN+%d" % (multiplier, offset) if multiplier else offset
            prefix_name = '.'.join([prefix, f.identifier]) if prefix else f.identifier
            n_with_values = f.options['n_with_values'].value
            bitfields = f.options['fields'].value if n_with_values > 0 else None
            item = FieldItem(prefix_name, name, adj_offset, size, str(f.units), f.desc, n_with_values, bitfields)
            items.append(item)
            offset += size
    return (items, offset, multiplier)
constant[
Helper for handling naming and sizing of fields. It's terrible.
]
variable[items] assign[=] list[[]]
for taget[name[f]] in starred[name[fields]] begin[:]
if <ast.BoolOp object at 0x7da1b26af1c0> begin[:]
variable[prefix_name] assign[=] <ast.IfExp object at 0x7da1b26acb20>
variable[n_with_values] assign[=] call[name[f].options][constant[n_with_values]].value
variable[bitfields] assign[=] <ast.IfExp object at 0x7da1b26ad630>
if compare[constant[size] in name[f].options] begin[:]
variable[name] assign[=] binary_operation[constant[%s[%s]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b26ad000>, <ast.Call object at 0x7da1b26adbd0>]]]
variable[size] assign[=] binary_operation[call[name[field_sizes]][call[name[f].options][constant[fill]].value] * call[name[f].options][constant[size]].value]
variable[item] assign[=] call[name[FieldItem], parameter[name[prefix_name], name[name], name[offset], name[size], call[name[str], parameter[name[f].units]], name[f].desc, name[n_with_values], name[bitfields]]]
call[name[items].append, parameter[name[item]]]
<ast.AugAssign object at 0x7da1b26acfd0>
return[tuple[[<ast.Name object at 0x7da1b0551d80>, <ast.Name object at 0x7da1b0550340>, <ast.Name object at 0x7da1b0552860>]]] | keyword[def] identifier[handle_fields] ( identifier[definitions] , identifier[fields] , identifier[prefix] , identifier[offset] , identifier[multiplier] ):
literal[string]
identifier[items] =[]
keyword[for] identifier[f] keyword[in] identifier[fields] :
keyword[if] identifier[f] . identifier[type_id] == literal[string] keyword[and] identifier[f] . identifier[options] [ literal[string] ]. identifier[value] keyword[in] identifier[CONSTRUCT_CODE] :
identifier[prefix_name] = literal[string] . identifier[join] ([ identifier[prefix] , identifier[f] . identifier[identifier] ]) keyword[if] identifier[prefix] keyword[else] identifier[f] . identifier[identifier]
identifier[n_with_values] = identifier[f] . identifier[options] [ literal[string] ]. identifier[value]
identifier[bitfields] = identifier[f] . identifier[options] [ literal[string] ]. identifier[value] keyword[if] identifier[n_with_values] > literal[int] keyword[else] keyword[None]
keyword[if] literal[string] keyword[in] identifier[f] . identifier[options] :
identifier[name] = literal[string] %( identifier[f] . identifier[options] [ literal[string] ]. identifier[value] , identifier[str] ( identifier[f] . identifier[options] [ literal[string] ]. identifier[value] ))
identifier[size] = identifier[field_sizes] [ identifier[f] . identifier[options] [ literal[string] ]. identifier[value] ]* identifier[f] . identifier[options] [ literal[string] ]. identifier[value]
identifier[item] = identifier[FieldItem] ( identifier[prefix_name] , identifier[name] , identifier[offset] , identifier[size] ,
identifier[str] ( identifier[f] . identifier[units] ), identifier[f] . identifier[desc] , identifier[n_with_values] , identifier[bitfields] )
identifier[items] . identifier[append] ( identifier[item] )
identifier[offset] += identifier[size]
keyword[else] :
identifier[name] = literal[string] %( identifier[f] . identifier[options] [ literal[string] ]. identifier[value] , literal[string] )
identifier[multiplier] = identifier[field_sizes] [ identifier[f] . identifier[options] [ literal[string] ]. identifier[value] ]
identifier[size] = identifier[field_sizes] [ identifier[f] . identifier[options] [ literal[string] ]. identifier[value] ]* literal[int]
identifier[item] = identifier[FieldItem] ( identifier[prefix_name] , identifier[name] , identifier[offset] , literal[string] ,
identifier[str] ( identifier[f] . identifier[units] ), identifier[f] . identifier[desc] , identifier[n_with_values] , identifier[bitfields] )
identifier[items] . identifier[append] ( identifier[item] )
identifier[offset] += identifier[size]
keyword[elif] identifier[f] . identifier[type_id] == literal[string] :
identifier[prefix_name] = literal[string] . identifier[join] ([ identifier[prefix] , identifier[f] . identifier[identifier] ]) keyword[if] identifier[prefix] keyword[else] identifier[f] . identifier[identifier]
identifier[n_with_values] = identifier[f] . identifier[options] [ literal[string] ]. identifier[value]
identifier[bitfields] = identifier[f] . identifier[options] [ literal[string] ]. identifier[value] keyword[if] identifier[n_with_values] > literal[int] keyword[else] keyword[None]
keyword[if] literal[string] keyword[in] identifier[f] . identifier[options] :
identifier[name] = literal[string]
identifier[size] = identifier[field_sizes] [ literal[string] ]* identifier[f] . identifier[options] [ literal[string] ]. identifier[value]
identifier[item] = identifier[FieldItem] ( identifier[prefix_name] , identifier[name] , identifier[offset] , identifier[size] ,
identifier[str] ( identifier[f] . identifier[units] ), identifier[f] . identifier[desc] , identifier[n_with_values] , identifier[bitfields] )
identifier[items] . identifier[append] ( identifier[item] )
identifier[offset] += identifier[size]
keyword[else] :
identifier[name] = literal[string]
identifier[size] = identifier[field_sizes] [ literal[string] ]
identifier[multiplier] = literal[int]
identifier[item] = identifier[FieldItem] ( identifier[prefix_name] , identifier[name] , identifier[offset] , literal[string] ,
identifier[str] ( identifier[f] . identifier[units] ), identifier[f] . identifier[desc] , identifier[n_with_values] , identifier[bitfields] )
identifier[items] . identifier[append] ( identifier[item] )
identifier[offset] += identifier[size]
keyword[elif] identifier[f] . identifier[type_id] == literal[string] :
identifier[name] = identifier[f] . identifier[options] [ literal[string] ]. identifier[value]
identifier[definition] = identifier[next] ( identifier[d] keyword[for] identifier[d] keyword[in] identifier[definitions] keyword[if] identifier[name] == identifier[d] . identifier[identifier] )
identifier[prefix_name] = literal[string] . identifier[join] ([ identifier[prefix] , identifier[f] . identifier[identifier] ]) keyword[if] identifier[prefix] keyword[else] identifier[f] . identifier[identifier]
( identifier[new_items] , identifier[new_offset] , identifier[new_multiplier] )= identifier[handle_fields] ( identifier[definitions] ,
identifier[definition] . identifier[fields] ,
identifier[prefix_name] + literal[string] ,
identifier[offset] ,
identifier[multiplier] )
identifier[multiplier] = identifier[new_offset] - identifier[offset]
( identifier[newer_items] , identifier[newer_offset] , identifier[newer_multiplier] )= identifier[handle_fields] ( identifier[definitions] ,
identifier[definition] . identifier[fields] ,
identifier[prefix_name] + literal[string] , identifier[offset] ,
identifier[multiplier] )
identifier[items] += identifier[newer_items]
identifier[offset] = identifier[newer_offset]
keyword[elif] identifier[f] . identifier[type_id] keyword[not] keyword[in] identifier[CONSTRUCT_CODE] :
identifier[name] = identifier[f] . identifier[type_id]
identifier[definition] = identifier[next] ( identifier[d] keyword[for] identifier[d] keyword[in] identifier[definitions] keyword[if] identifier[name] == identifier[d] . identifier[identifier] )
identifier[prefix_name] = literal[string] . identifier[join] ([ identifier[prefix] , identifier[f] . identifier[identifier] ]) keyword[if] identifier[prefix] keyword[else] identifier[f] . identifier[identifier]
( identifier[new_items] , identifier[new_offset] , identifier[new_multiplier] )= identifier[handle_fields] ( identifier[definitions] ,
identifier[definition] . identifier[fields] ,
identifier[prefix_name] ,
identifier[offset] ,
identifier[multiplier] )
identifier[items] += identifier[new_items]
identifier[offset] = identifier[new_offset]
identifier[multiplier] = identifier[new_multiplier]
keyword[else] :
identifier[size] = identifier[field_sizes] [ identifier[f] . identifier[type_id] ]
identifier[name] = identifier[f] . identifier[type_id]
identifier[adj_offset] = literal[string] %( identifier[multiplier] , identifier[offset] ) keyword[if] identifier[multiplier] keyword[else] identifier[offset]
identifier[prefix_name] = literal[string] . identifier[join] ([ identifier[prefix] , identifier[f] . identifier[identifier] ]) keyword[if] identifier[prefix] keyword[else] identifier[f] . identifier[identifier]
identifier[n_with_values] = identifier[f] . identifier[options] [ literal[string] ]. identifier[value]
identifier[bitfields] = identifier[f] . identifier[options] [ literal[string] ]. identifier[value] keyword[if] identifier[n_with_values] > literal[int] keyword[else] keyword[None]
identifier[item] = identifier[FieldItem] ( identifier[prefix_name] , identifier[name] , identifier[adj_offset] , identifier[size] , identifier[str] ( identifier[f] . identifier[units] ), identifier[f] . identifier[desc] , identifier[n_with_values] , identifier[bitfields] )
identifier[items] . identifier[append] ( identifier[item] )
identifier[offset] += identifier[size]
keyword[return] ( identifier[items] , identifier[offset] , identifier[multiplier] ) | def handle_fields(definitions, fields, prefix, offset, multiplier):
"""
Helper for handling naming and sizing of fields. It's terrible.
"""
items = []
for f in fields:
if f.type_id == 'array' and f.options['fill'].value in CONSTRUCT_CODE:
prefix_name = '.'.join([prefix, f.identifier]) if prefix else f.identifier
n_with_values = f.options['n_with_values'].value
bitfields = f.options['fields'].value if n_with_values > 0 else None
if 'size' in f.options:
name = '%s[%s]' % (f.options['fill'].value, str(f.options['size'].value))
size = field_sizes[f.options['fill'].value] * f.options['size'].value
item = FieldItem(prefix_name, name, offset, size, str(f.units), f.desc, n_with_values, bitfields)
items.append(item)
offset += size # depends on [control=['if'], data=[]]
else:
name = '%s[%s]' % (f.options['fill'].value, 'N')
multiplier = field_sizes[f.options['fill'].value]
size = field_sizes[f.options['fill'].value] * 1
item = FieldItem(prefix_name, name, offset, 'N', str(f.units), f.desc, n_with_values, bitfields)
items.append(item)
offset += size # depends on [control=['if'], data=[]]
elif f.type_id == 'string':
prefix_name = '.'.join([prefix, f.identifier]) if prefix else f.identifier
n_with_values = f.options['n_with_values'].value
bitfields = f.options['fields'].value if n_with_values > 0 else None
if 'size' in f.options:
name = 'string'
size = field_sizes['u8'] * f.options['size'].value
item = FieldItem(prefix_name, name, offset, size, str(f.units), f.desc, n_with_values, bitfields)
items.append(item)
offset += size # depends on [control=['if'], data=[]]
else:
name = 'string'
size = field_sizes['u8']
multiplier = 1
item = FieldItem(prefix_name, name, offset, 'N', str(f.units), f.desc, n_with_values, bitfields)
items.append(item)
offset += size # depends on [control=['if'], data=[]]
elif f.type_id == 'array':
name = f.options['fill'].value
definition = next((d for d in definitions if name == d.identifier))
prefix_name = '.'.join([prefix, f.identifier]) if prefix else f.identifier
(new_items, new_offset, new_multiplier) = handle_fields(definitions, definition.fields, prefix_name + '[N]', offset, multiplier)
multiplier = new_offset - offset
(newer_items, newer_offset, newer_multiplier) = handle_fields(definitions, definition.fields, prefix_name + '[N]', offset, multiplier)
items += newer_items
offset = newer_offset # depends on [control=['if'], data=[]]
elif f.type_id not in CONSTRUCT_CODE:
name = f.type_id
definition = next((d for d in definitions if name == d.identifier))
prefix_name = '.'.join([prefix, f.identifier]) if prefix else f.identifier
(new_items, new_offset, new_multiplier) = handle_fields(definitions, definition.fields, prefix_name, offset, multiplier)
items += new_items
offset = new_offset
multiplier = new_multiplier # depends on [control=['if'], data=[]]
else:
size = field_sizes[f.type_id]
name = f.type_id
adj_offset = '%dN+%d' % (multiplier, offset) if multiplier else offset
prefix_name = '.'.join([prefix, f.identifier]) if prefix else f.identifier
n_with_values = f.options['n_with_values'].value
bitfields = f.options['fields'].value if n_with_values > 0 else None
item = FieldItem(prefix_name, name, adj_offset, size, str(f.units), f.desc, n_with_values, bitfields)
items.append(item)
offset += size # depends on [control=['for'], data=['f']]
return (items, offset, multiplier) |
def upload_function_zip(self, location, zip_path, project_id=None):
    """
    Uploads zip file with sources.

    Asks the Cloud Functions API for a signed upload URL for the given
    location, then PUTs the zip file's bytes to that URL.

    :param location: The location where the function is created.
    :type location: str
    :param zip_path: The path of the valid .zip file to upload.
    :type zip_path: str
    :param project_id: Optional, Google Cloud Project project_id where the function belongs.
        If set to None or missing, the default project_id from the GCP connection is used.
    :type project_id: str
    :return: The upload URL that was returned by generateUploadUrl method.
    """
    functions_api = self.get_conn().projects().locations().functions()
    generate_response = functions_api.generateUploadUrl(
        parent=self._full_location(project_id, location)
    ).execute(num_retries=self.num_retries)
    upload_url = generate_response.get('uploadUrl')
    # Both headers are mandated by the generateUploadUrl API contract:
    # https://cloud.google.com/functions/docs/reference/rest/v1/projects.locations.functions/generateUploadUrl
    # nopep8
    put_headers = {
        'Content-type': 'application/zip',
        'x-goog-content-length-range': '0,104857600',
    }
    with open(zip_path, 'rb') as archive:
        requests.put(url=upload_url, data=archive, headers=put_headers)
    return upload_url
constant[
Uploads zip file with sources.
:param location: The location where the function is created.
:type location: str
:param zip_path: The path of the valid .zip file to upload.
:type zip_path: str
:param project_id: Optional, Google Cloud Project project_id where the function belongs.
If set to None or missing, the default project_id from the GCP connection is used.
:type project_id: str
:return: The upload URL that was returned by generateUploadUrl method.
]
variable[response] assign[=] call[call[call[call[call[call[name[self].get_conn, parameter[]].projects, parameter[]].locations, parameter[]].functions, parameter[]].generateUploadUrl, parameter[]].execute, parameter[]]
variable[upload_url] assign[=] call[name[response].get, parameter[constant[uploadUrl]]]
with call[name[open], parameter[name[zip_path], constant[rb]]] begin[:]
call[name[requests].put, parameter[]]
return[name[upload_url]] | keyword[def] identifier[upload_function_zip] ( identifier[self] , identifier[location] , identifier[zip_path] , identifier[project_id] = keyword[None] ):
literal[string]
identifier[response] = identifier[self] . identifier[get_conn] (). identifier[projects] (). identifier[locations] (). identifier[functions] (). identifier[generateUploadUrl] (
identifier[parent] = identifier[self] . identifier[_full_location] ( identifier[project_id] , identifier[location] )
). identifier[execute] ( identifier[num_retries] = identifier[self] . identifier[num_retries] )
identifier[upload_url] = identifier[response] . identifier[get] ( literal[string] )
keyword[with] identifier[open] ( identifier[zip_path] , literal[string] ) keyword[as] identifier[fp] :
identifier[requests] . identifier[put] (
identifier[url] = identifier[upload_url] ,
identifier[data] = identifier[fp] ,
identifier[headers] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
)
keyword[return] identifier[upload_url] | def upload_function_zip(self, location, zip_path, project_id=None):
"""
Uploads zip file with sources.
:param location: The location where the function is created.
:type location: str
:param zip_path: The path of the valid .zip file to upload.
:type zip_path: str
:param project_id: Optional, Google Cloud Project project_id where the function belongs.
If set to None or missing, the default project_id from the GCP connection is used.
:type project_id: str
:return: The upload URL that was returned by generateUploadUrl method.
"""
response = self.get_conn().projects().locations().functions().generateUploadUrl(parent=self._full_location(project_id, location)).execute(num_retries=self.num_retries)
upload_url = response.get('uploadUrl')
with open(zip_path, 'rb') as fp:
# Those two headers needs to be specified according to:
# https://cloud.google.com/functions/docs/reference/rest/v1/projects.locations.functions/generateUploadUrl
# nopep8
requests.put(url=upload_url, data=fp, headers={'Content-type': 'application/zip', 'x-goog-content-length-range': '0,104857600'}) # depends on [control=['with'], data=['fp']]
return upload_url |
def install_import_hook():
    """Installs __import__ hook.

    Replaces builtins.__import__ with a wrapper that runs
    process_import() whenever a module named 'end' is imported, then
    delegates to the original importer.
    """
    # Keep the original importer so it can be delegated to and so
    # functools.wraps can copy its metadata onto the wrapper.
    saved_import = builtins.__import__
    @functools.wraps(saved_import)
    def import_hook(name, *args, **kwargs):
        # An `import end` statement triggers the project's hook before
        # the import proceeds normally.
        if name == 'end':
            process_import()
        end  # NOTE(review): bare `end` appears to be this project's block-terminator dialect, not standard Python -- confirm against its preprocessor.
        return saved_import(name, *args, **kwargs)
    end
    builtins.__import__ = import_hook
constant[Installs __import__ hook.]
variable[saved_import] assign[=] name[builtins].__import__
def function[import_hook, parameter[name]]:
if compare[name[name] equal[==] constant[end]] begin[:]
call[name[process_import], parameter[]]
name[end]
return[call[name[saved_import], parameter[name[name], <ast.Starred object at 0x7da1b0a05a80>]]]
name[end]
name[builtins].__import__ assign[=] name[import_hook] | keyword[def] identifier[install_import_hook] ():
literal[string]
identifier[saved_import] = identifier[builtins] . identifier[__import__]
@ identifier[functools] . identifier[wraps] ( identifier[saved_import] )
keyword[def] identifier[import_hook] ( identifier[name] ,* identifier[args] ,** identifier[kwargs] ):
keyword[if] identifier[name] == literal[string] :
identifier[process_import] ()
identifier[end]
keyword[return] identifier[saved_import] ( identifier[name] ,* identifier[args] ,** identifier[kwargs] )
identifier[end]
identifier[builtins] . identifier[__import__] = identifier[import_hook] | def install_import_hook():
"""Installs __import__ hook."""
saved_import = builtins.__import__
@functools.wraps(saved_import)
def import_hook(name, *args, **kwargs):
if name == 'end':
process_import() # depends on [control=['if'], data=[]]
end
return saved_import(name, *args, **kwargs)
end
builtins.__import__ = import_hook |
def send_mail(subject, message, recipient_list, from_email=None,
              fail_silently=False, auth_user=None, auth_password=None,
              connection=None, **kwargs):
    """
    Easy wrapper for sending a single message to a recipient list. All members
    of the recipient list will see the other recipients in the 'To' field.

    When no ``connection`` is supplied, one is opened with the given
    credentials. Returns the number of messages sent.
    """
    backend = connection or get_connection(
        username=auth_user,
        password=auth_password,
        fail_silently=fail_silently,
    )
    email = EmailMessage(subject, message, from_email, recipient_list, **kwargs)
    return backend.send_messages([email])
constant[
Easy wrapper for sending a single message to a recipient list. All members
of the recipient list will see the other recipients in the 'To' field.
]
variable[connection] assign[=] <ast.BoolOp object at 0x7da18fe93eb0>
variable[mail_message] assign[=] call[name[EmailMessage], parameter[name[subject], name[message], name[from_email], name[recipient_list]]]
return[call[name[connection].send_messages, parameter[list[[<ast.Name object at 0x7da18fe93280>]]]]] | keyword[def] identifier[send_mail] ( identifier[subject] , identifier[message] , identifier[recipient_list] , identifier[from_email] = keyword[None] ,
identifier[fail_silently] = keyword[False] , identifier[auth_user] = keyword[None] , identifier[auth_password] = keyword[None] ,
identifier[connection] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[connection] = identifier[connection] keyword[or] identifier[get_connection] (
identifier[username] = identifier[auth_user] ,
identifier[password] = identifier[auth_password] ,
identifier[fail_silently] = identifier[fail_silently] ,
)
identifier[mail_message] = identifier[EmailMessage] ( identifier[subject] , identifier[message] , identifier[from_email] , identifier[recipient_list] ,
** identifier[kwargs] )
keyword[return] identifier[connection] . identifier[send_messages] ([ identifier[mail_message] ]) | def send_mail(subject, message, recipient_list, from_email=None, fail_silently=False, auth_user=None, auth_password=None, connection=None, **kwargs):
"""
Easy wrapper for sending a single message to a recipient list. All members
of the recipient list will see the other recipients in the 'To' field.
"""
connection = connection or get_connection(username=auth_user, password=auth_password, fail_silently=fail_silently)
mail_message = EmailMessage(subject, message, from_email, recipient_list, **kwargs)
return connection.send_messages([mail_message]) |
def unskew_S1(S1, M, N):
    """
    Unskew the sensivity indice
    (Jean-Yves Tissot, Clémentine Prieur (2012) "Bias correction for the
    estimation of sensitivity indices based on random balance designs.",
    Reliability Engineering and System Safety, Elsevier, 107, 205-213.
    doi:10.1016/j.ress.2012.06.010)

    :param S1: raw first-order sensitivity index estimate
    :param M: harmonic truncation parameter of the RBD estimator
    :param N: sample size
    :return: bias-corrected index
    """
    bias_factor = (2 * M) / N
    correction = bias_factor / (1 - bias_factor)
    return S1 - correction * (1 - S1)
constant[
Unskew the sensivity indice
(Jean-Yves Tissot, Clémentine Prieur (2012) "Bias correction for the
estimation of sensitivity indices based on random balance designs.",
Reliability Engineering and System Safety, Elsevier, 107, 205-213.
doi:10.1016/j.ress.2012.06.010)
]
variable[lamb] assign[=] binary_operation[binary_operation[constant[2] * name[M]] / name[N]]
return[binary_operation[name[S1] - binary_operation[binary_operation[name[lamb] / binary_operation[constant[1] - name[lamb]]] * binary_operation[constant[1] - name[S1]]]]] | keyword[def] identifier[unskew_S1] ( identifier[S1] , identifier[M] , identifier[N] ):
literal[string]
identifier[lamb] =( literal[int] * identifier[M] )/ identifier[N]
keyword[return] identifier[S1] - identifier[lamb] /( literal[int] - identifier[lamb] )*( literal[int] - identifier[S1] ) | def unskew_S1(S1, M, N):
"""
Unskew the sensivity indice
(Jean-Yves Tissot, Clémentine Prieur (2012) "Bias correction for the
estimation of sensitivity indices based on random balance designs.",
Reliability Engineering and System Safety, Elsevier, 107, 205-213.
doi:10.1016/j.ress.2012.06.010)
"""
lamb = 2 * M / N
return S1 - lamb / (1 - lamb) * (1 - S1) |
def __process_instructions(self, inst):
    """Act on instructions received from the peer(s).

    ``inst`` is an iterable of ``(cmd, msg, com)`` triples, where ``com``
    is the connection the instruction arrived on.  Commands not listed in
    ``config.CMDS`` are silently dropped.  Recognised commands: ``MSG``
    (display, and relay when acting as server), ``QUIT`` (peer left),
    ``ASSUME`` (this client is promoted to server).
    """
    to_send = []  # (message, originating connection) pairs to relay later
    for cmd, msg, com in inst:
        if cmd not in config.CMDS:  # ignore if it is not legal
            continue
        if cmd == 'MSG':
            if self.mode == 's':
                # server mode: queue for relaying to the other clients
                to_send.append((msg, com))
            if self.color:
                txt = config.Col.BOLD + msg + config.Col.ENDC
            else:
                txt = msg
            print(txt)
            if self.issue_alert:
                # runs the configured alert shell command; presumably a
                # terminal bell/notification -- TODO confirm
                os.system(self.alert)
        elif cmd == 'QUIT':
            if self.mode == 's':  # client quit: drop its connection
                com.close()
                with self.__client_list_lock:
                    self.clients.remove(com)
            else:  # server quit
                self.__s.close()
                self.__make_client()  # wait for new server
        elif cmd == 'ASSUME':
            if self.mode == 'c':  # assume a server role if client
                self.__s.close()
                self.__make_server()
    # Second pass: relay the queued messages.  A client forwards to the
    # server; the server echoes to every client except the originator.
    for msg, sender in to_send:
        if self.mode == 'c':
            utils.msg(msg, self.__s)
        else:
            with self.__client_list_lock:
                for com in self.clients:
                    if com == sender:
                        continue  # do not echo back to the sender
                    utils.msg(msg, com)
constant[Act on instructions recieved]
variable[to_send] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b141d150>, <ast.Name object at 0x7da1b141cfa0>, <ast.Name object at 0x7da1b141ead0>]]] in starred[name[inst]] begin[:]
if compare[name[cmd] <ast.NotIn object at 0x7da2590d7190> name[config].CMDS] begin[:]
continue
if compare[name[cmd] equal[==] constant[MSG]] begin[:]
if compare[name[self].mode equal[==] constant[s]] begin[:]
call[name[to_send].append, parameter[tuple[[<ast.Name object at 0x7da1b141ce50>, <ast.Name object at 0x7da1b141d4b0>]]]]
if name[self].color begin[:]
variable[txt] assign[=] binary_operation[binary_operation[name[config].Col.BOLD + name[msg]] + name[config].Col.ENDC]
call[name[print], parameter[name[txt]]]
if name[self].issue_alert begin[:]
call[name[os].system, parameter[name[self].alert]]
for taget[tuple[[<ast.Name object at 0x7da1b16d4cd0>, <ast.Name object at 0x7da1b16d4ca0>]]] in starred[name[to_send]] begin[:]
if compare[name[self].mode equal[==] constant[c]] begin[:]
call[name[utils].msg, parameter[name[msg], name[self].__s]] | keyword[def] identifier[__process_instructions] ( identifier[self] , identifier[inst] ):
literal[string]
identifier[to_send] =[]
keyword[for] identifier[cmd] , identifier[msg] , identifier[com] keyword[in] identifier[inst] :
keyword[if] identifier[cmd] keyword[not] keyword[in] identifier[config] . identifier[CMDS] :
keyword[continue]
keyword[if] identifier[cmd] == literal[string] :
keyword[if] identifier[self] . identifier[mode] == literal[string] :
identifier[to_send] . identifier[append] (( identifier[msg] , identifier[com] ))
keyword[if] identifier[self] . identifier[color] :
identifier[txt] = identifier[config] . identifier[Col] . identifier[BOLD] + identifier[msg] + identifier[config] . identifier[Col] . identifier[ENDC]
keyword[else] :
identifier[txt] = identifier[msg]
identifier[print] ( identifier[txt] )
keyword[if] identifier[self] . identifier[issue_alert] :
identifier[os] . identifier[system] ( identifier[self] . identifier[alert] )
keyword[elif] identifier[cmd] == literal[string] :
keyword[if] identifier[self] . identifier[mode] == literal[string] :
identifier[com] . identifier[close] ()
keyword[with] identifier[self] . identifier[__client_list_lock] :
identifier[self] . identifier[clients] . identifier[remove] ( identifier[com] )
keyword[else] :
identifier[self] . identifier[__s] . identifier[close] ()
identifier[self] . identifier[__make_client] ()
keyword[elif] identifier[cmd] == literal[string] :
keyword[if] identifier[self] . identifier[mode] == literal[string] :
identifier[self] . identifier[__s] . identifier[close] ()
identifier[self] . identifier[__make_server] ()
keyword[for] identifier[msg] , identifier[sender] keyword[in] identifier[to_send] :
keyword[if] identifier[self] . identifier[mode] == literal[string] :
identifier[utils] . identifier[msg] ( identifier[msg] , identifier[self] . identifier[__s] )
keyword[else] :
keyword[with] identifier[self] . identifier[__client_list_lock] :
keyword[for] identifier[com] keyword[in] identifier[self] . identifier[clients] :
keyword[if] identifier[com] == identifier[sender] :
keyword[continue]
identifier[utils] . identifier[msg] ( identifier[msg] , identifier[com] ) | def __process_instructions(self, inst):
"""Act on instructions recieved"""
to_send = []
for (cmd, msg, com) in inst:
if cmd not in config.CMDS: # ignore if it is not legal
continue # depends on [control=['if'], data=[]]
if cmd == 'MSG':
if self.mode == 's':
to_send.append((msg, com)) # depends on [control=['if'], data=[]]
if self.color:
txt = config.Col.BOLD + msg + config.Col.ENDC # depends on [control=['if'], data=[]]
else:
txt = msg
print(txt)
if self.issue_alert:
os.system(self.alert) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif cmd == 'QUIT':
if self.mode == 's': # client quit
com.close()
with self.__client_list_lock:
self.clients.remove(com) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
else: # server quit
self.__s.close()
self.__make_client() # wait for new server # depends on [control=['if'], data=[]]
elif cmd == 'ASSUME':
if self.mode == 'c': # assume a server role if client
self.__s.close()
self.__make_server() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
for (msg, sender) in to_send:
if self.mode == 'c':
utils.msg(msg, self.__s) # depends on [control=['if'], data=[]]
else:
with self.__client_list_lock:
for com in self.clients:
if com == sender:
continue # depends on [control=['if'], data=[]]
utils.msg(msg, com) # depends on [control=['for'], data=['com']] # depends on [control=['with'], data=[]] # depends on [control=['for'], data=[]] |
def get_usable_ciphersuites(l, kx):
    """
    Filter a proposed cipher-suite list down to the usable suites.

    A suite is kept when its identifier is known, its implementation is
    flagged usable in the current version of the TLS extension, and its
    key-exchange algorithm either is anonymous or mentions *kx* in its
    name.  The relative order of the proposal is preserved.
    """
    def _acceptable(suite_id):
        # One predicate per proposed suite; unknown ids are rejected.
        if suite_id not in _tls_cipher_suites_cls:
            return False
        suite_cls = _tls_cipher_suites_cls[suite_id]
        if not suite_cls.usable:
            return False
        # XXX select among RSA and ECDSA cipher suites
        # according to the key(s) the server was given
        return suite_cls.kx_alg.anonymous or kx in suite_cls.kx_alg.name

    return [suite_id for suite_id in l if _acceptable(suite_id)]
constant[
From a list of proposed ciphersuites, this function returns a list of
usable cipher suites, i.e. for which key exchange, cipher and hash
algorithms are known to be implemented and usable in current version of the
TLS extension. The order of the cipher suites in the list returned by the
function matches the one of the proposal.
]
variable[res] assign[=] list[[]]
for taget[name[c]] in starred[name[l]] begin[:]
if compare[name[c] in name[_tls_cipher_suites_cls]] begin[:]
variable[ciph] assign[=] call[name[_tls_cipher_suites_cls]][name[c]]
if name[ciph].usable begin[:]
if <ast.BoolOp object at 0x7da1b1fc9420> begin[:]
call[name[res].append, parameter[name[c]]]
return[name[res]] | keyword[def] identifier[get_usable_ciphersuites] ( identifier[l] , identifier[kx] ):
literal[string]
identifier[res] =[]
keyword[for] identifier[c] keyword[in] identifier[l] :
keyword[if] identifier[c] keyword[in] identifier[_tls_cipher_suites_cls] :
identifier[ciph] = identifier[_tls_cipher_suites_cls] [ identifier[c] ]
keyword[if] identifier[ciph] . identifier[usable] :
keyword[if] identifier[ciph] . identifier[kx_alg] . identifier[anonymous] keyword[or] identifier[kx] keyword[in] identifier[ciph] . identifier[kx_alg] . identifier[name] :
identifier[res] . identifier[append] ( identifier[c] )
keyword[return] identifier[res] | def get_usable_ciphersuites(l, kx):
"""
From a list of proposed ciphersuites, this function returns a list of
usable cipher suites, i.e. for which key exchange, cipher and hash
algorithms are known to be implemented and usable in current version of the
TLS extension. The order of the cipher suites in the list returned by the
function matches the one of the proposal.
"""
res = []
for c in l:
if c in _tls_cipher_suites_cls:
ciph = _tls_cipher_suites_cls[c]
if ciph.usable:
# XXX select among RSA and ECDSA cipher suites
# according to the key(s) the server was given
if ciph.kx_alg.anonymous or kx in ciph.kx_alg.name:
res.append(c) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['c', '_tls_cipher_suites_cls']] # depends on [control=['for'], data=['c']]
return res |
def parse(pem_str):
    # type: (bytes) -> List[AbstractPEMObject]
    """
    Extract PEM objects from *pem_str*.

    :param pem_str: String to parse.
    :type pem_str: bytes
    :return: list of :ref:`pem-objects`
    """
    objects = []
    for match in _PEM_RE.finditer(pem_str):
        # group(1) is the PEM type marker, group(0) the whole PEM block.
        pem_class = _PEM_TO_CLASS[match.group(1)]
        objects.append(pem_class(match.group(0)))
    return objects
constant[
Extract PEM objects from *pem_str*.
:param pem_str: String to parse.
:type pem_str: bytes
:return: list of :ref:`pem-objects`
]
return[<ast.ListComp object at 0x7da18f00d5d0>] | keyword[def] identifier[parse] ( identifier[pem_str] ):
literal[string]
keyword[return] [
identifier[_PEM_TO_CLASS] [ identifier[match] . identifier[group] ( literal[int] )]( identifier[match] . identifier[group] ( literal[int] ))
keyword[for] identifier[match] keyword[in] identifier[_PEM_RE] . identifier[finditer] ( identifier[pem_str] )
] | def parse(pem_str):
# type: (bytes) -> List[AbstractPEMObject]
'\n Extract PEM objects from *pem_str*.\n\n :param pem_str: String to parse.\n :type pem_str: bytes\n :return: list of :ref:`pem-objects`\n '
return [_PEM_TO_CLASS[match.group(1)](match.group(0)) for match in _PEM_RE.finditer(pem_str)] |
def _get_info(self, formula_def):
    '''
    Render a human-readable description of a package definition.

    Missing metadata fields are filled in-place with the string 'None'
    (and 'installed' with 'Not installed') so the template below can
    always be formatted.
    '''
    placeholder_fields = (
        'name',
        'os',
        'os_family',
        'release',
        'version',
        'dependencies',
        'os_dependencies',
        'os_family_dependencies',
        'summary',
        'description',
    )
    for field in placeholder_fields:
        formula_def.setdefault(field, 'None')
    formula_def.setdefault('installed', 'Not installed')
    template = '\n'.join((
        'Name: {name}',
        'Version: {version}',
        'Release: {release}',
        'Install Date: {installed}',
        'Supported OSes: {os}',
        'Supported OS families: {os_family}',
        'Dependencies: {dependencies}',
        'OS Dependencies: {os_dependencies}',
        'OS Family Dependencies: {os_family_dependencies}',
        'Summary: {summary}',
        'Description:',
        '{description}',
    ))
    return template.format(**formula_def)
constant[
Get package info
]
variable[fields] assign[=] tuple[[<ast.Constant object at 0x7da20c6c6650>, <ast.Constant object at 0x7da20c6c6140>, <ast.Constant object at 0x7da20c6c7e20>, <ast.Constant object at 0x7da20c6c5c60>, <ast.Constant object at 0x7da20c6c6a70>, <ast.Constant object at 0x7da20c6c4130>, <ast.Constant object at 0x7da20c6c6320>, <ast.Constant object at 0x7da20c6c7cd0>, <ast.Constant object at 0x7da20c6c4b80>, <ast.Constant object at 0x7da20c6c6c50>]]
for taget[name[item]] in starred[name[fields]] begin[:]
if compare[name[item] <ast.NotIn object at 0x7da2590d7190> name[formula_def]] begin[:]
call[name[formula_def]][name[item]] assign[=] constant[None]
if compare[constant[installed] <ast.NotIn object at 0x7da2590d7190> name[formula_def]] begin[:]
call[name[formula_def]][constant[installed]] assign[=] constant[Not installed]
return[call[constant[Name: {name}
Version: {version}
Release: {release}
Install Date: {installed}
Supported OSes: {os}
Supported OS families: {os_family}
Dependencies: {dependencies}
OS Dependencies: {os_dependencies}
OS Family Dependencies: {os_family_dependencies}
Summary: {summary}
Description:
{description}].format, parameter[]]] | keyword[def] identifier[_get_info] ( identifier[self] , identifier[formula_def] ):
literal[string]
identifier[fields] =(
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
)
keyword[for] identifier[item] keyword[in] identifier[fields] :
keyword[if] identifier[item] keyword[not] keyword[in] identifier[formula_def] :
identifier[formula_def] [ identifier[item] ]= literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[formula_def] :
identifier[formula_def] [ literal[string] ]= literal[string]
keyword[return] ( literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] ). identifier[format] (** identifier[formula_def] ) | def _get_info(self, formula_def):
"""
Get package info
"""
fields = ('name', 'os', 'os_family', 'release', 'version', 'dependencies', 'os_dependencies', 'os_family_dependencies', 'summary', 'description')
for item in fields:
if item not in formula_def:
formula_def[item] = 'None' # depends on [control=['if'], data=['item', 'formula_def']] # depends on [control=['for'], data=['item']]
if 'installed' not in formula_def:
formula_def['installed'] = 'Not installed' # depends on [control=['if'], data=['formula_def']]
return 'Name: {name}\nVersion: {version}\nRelease: {release}\nInstall Date: {installed}\nSupported OSes: {os}\nSupported OS families: {os_family}\nDependencies: {dependencies}\nOS Dependencies: {os_dependencies}\nOS Family Dependencies: {os_family_dependencies}\nSummary: {summary}\nDescription:\n{description}'.format(**formula_def) |
def get_projects(self, state_filter=None, top=None, skip=None, continuation_token=None, get_default_team_image_url=None):
    """GetProjects.
    Get all projects in the organization that the authenticated user has access to.
    :param str state_filter: Filter on team projects in a specific team project state (default: WellFormed).
    :param int top:
    :param int skip:
    :param str continuation_token:
    :param bool get_default_team_image_url:
    :rtype: [TeamProjectReference]
    """
    # (wire name, python name, value, serialization type) for every
    # optional query-string parameter; only non-None values are sent.
    optional_params = (
        ('stateFilter', 'state_filter', state_filter, 'str'),
        ('$top', 'top', top, 'int'),
        ('$skip', 'skip', skip, 'int'),
        ('continuationToken', 'continuation_token', continuation_token, 'str'),
        ('getDefaultTeamImageUrl', 'get_default_team_image_url',
         get_default_team_image_url, 'bool'),
    )
    query_parameters = {}
    for wire_name, py_name, value, type_name in optional_params:
        if value is not None:
            query_parameters[wire_name] = self._serialize.query(py_name, value, type_name)
    response = self._send(http_method='GET',
                          location_id='603fe2ac-9723-48b9-88ad-09305aa6c6e1',
                          version='5.0',
                          query_parameters=query_parameters)
    return self._deserialize('[TeamProjectReference]', self._unwrap_collection(response))
constant[GetProjects.
Get all projects in the organization that the authenticated user has access to.
:param str state_filter: Filter on team projects in a specific team project state (default: WellFormed).
:param int top:
:param int skip:
:param str continuation_token:
:param bool get_default_team_image_url:
:rtype: [TeamProjectReference]
]
variable[query_parameters] assign[=] dictionary[[], []]
if compare[name[state_filter] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[stateFilter]] assign[=] call[name[self]._serialize.query, parameter[constant[state_filter], name[state_filter], constant[str]]]
if compare[name[top] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[$top]] assign[=] call[name[self]._serialize.query, parameter[constant[top], name[top], constant[int]]]
if compare[name[skip] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[$skip]] assign[=] call[name[self]._serialize.query, parameter[constant[skip], name[skip], constant[int]]]
if compare[name[continuation_token] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[continuationToken]] assign[=] call[name[self]._serialize.query, parameter[constant[continuation_token], name[continuation_token], constant[str]]]
if compare[name[get_default_team_image_url] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[getDefaultTeamImageUrl]] assign[=] call[name[self]._serialize.query, parameter[constant[get_default_team_image_url], name[get_default_team_image_url], constant[bool]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[[TeamProjectReference]], call[name[self]._unwrap_collection, parameter[name[response]]]]]] | keyword[def] identifier[get_projects] ( identifier[self] , identifier[state_filter] = keyword[None] , identifier[top] = keyword[None] , identifier[skip] = keyword[None] , identifier[continuation_token] = keyword[None] , identifier[get_default_team_image_url] = keyword[None] ):
literal[string]
identifier[query_parameters] ={}
keyword[if] identifier[state_filter] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[state_filter] , literal[string] )
keyword[if] identifier[top] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[top] , literal[string] )
keyword[if] identifier[skip] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[skip] , literal[string] )
keyword[if] identifier[continuation_token] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[continuation_token] , literal[string] )
keyword[if] identifier[get_default_team_image_url] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[get_default_team_image_url] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[query_parameters] = identifier[query_parameters] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[self] . identifier[_unwrap_collection] ( identifier[response] )) | def get_projects(self, state_filter=None, top=None, skip=None, continuation_token=None, get_default_team_image_url=None):
"""GetProjects.
Get all projects in the organization that the authenticated user has access to.
:param str state_filter: Filter on team projects in a specific team project state (default: WellFormed).
:param int top:
:param int skip:
:param str continuation_token:
:param bool get_default_team_image_url:
:rtype: [TeamProjectReference]
"""
query_parameters = {}
if state_filter is not None:
query_parameters['stateFilter'] = self._serialize.query('state_filter', state_filter, 'str') # depends on [control=['if'], data=['state_filter']]
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int') # depends on [control=['if'], data=['top']]
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int') # depends on [control=['if'], data=['skip']]
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str') # depends on [control=['if'], data=['continuation_token']]
if get_default_team_image_url is not None:
query_parameters['getDefaultTeamImageUrl'] = self._serialize.query('get_default_team_image_url', get_default_team_image_url, 'bool') # depends on [control=['if'], data=['get_default_team_image_url']]
response = self._send(http_method='GET', location_id='603fe2ac-9723-48b9-88ad-09305aa6c6e1', version='5.0', query_parameters=query_parameters)
return self._deserialize('[TeamProjectReference]', self._unwrap_collection(response)) |
def _get_condition(self, acceptance_prob, a):
    """
    Temporary method to fix issue in numpy 0.12 #852.

    Evaluates the acceptance comparison in one of two forms depending on
    ``a``; the two branches are intentionally not algebraic rearrangements
    of each other.
    """
    # Keep the exact original arithmetic so floating-point results match.
    if a == 1:
        lhs = acceptance_prob ** a
        rhs = 1 / (2 ** a)
    else:
        lhs = 1 / (acceptance_prob ** a)
        rhs = 2 ** (-a)
    return lhs > rhs
constant[
Temporary method to fix issue in numpy 0.12 #852
]
if compare[name[a] equal[==] constant[1]] begin[:]
return[compare[binary_operation[name[acceptance_prob] ** name[a]] greater[>] binary_operation[constant[1] / binary_operation[constant[2] ** name[a]]]]] | keyword[def] identifier[_get_condition] ( identifier[self] , identifier[acceptance_prob] , identifier[a] ):
literal[string]
keyword[if] identifier[a] == literal[int] :
keyword[return] ( identifier[acceptance_prob] ** identifier[a] )>( literal[int] /( literal[int] ** identifier[a] ))
keyword[else] :
keyword[return] ( literal[int] /( identifier[acceptance_prob] ** identifier[a] ))>( literal[int] **(- identifier[a] )) | def _get_condition(self, acceptance_prob, a):
"""
Temporary method to fix issue in numpy 0.12 #852
"""
if a == 1:
return acceptance_prob ** a > 1 / 2 ** a # depends on [control=['if'], data=['a']]
else:
return 1 / acceptance_prob ** a > 2 ** (-a) |
def calc_complex(mag, pha):
    ''' Calculate real and imaginary part of the complex conductivity from
    magnitude and phase in log10.

    Magnitudes are given as log10 values; phases are divided by 1e3 before
    use (presumably mrad -- confirm with the data source).  Returns two
    lists: log10 of the real part of 1/sigma, and log10 of |imag(1/sigma)|
    (NaN where the imaginary part is exactly zero).
    '''
    real = []
    imag = []
    for m, p in zip(mag, pha):
        # complex conductivity reconstructed from polar form
        sigma = 10 ** m * math.e ** (1j * p / 1e3)
        inv = 1 / sigma
        real.append(math.log10(inv.real))
        if inv.imag == 0:
            # log10 of zero is undefined; mark the sample as NaN
            imag.append(math.nan)
        else:
            imag.append(math.log10(abs(inv.imag)))
    return real, imag
constant[ Calculate real and imaginary part of the complex conductivity from
magnitude and phase in log10.
]
variable[complx] assign[=] <ast.ListComp object at 0x7da1b24e19c0>
variable[real] assign[=] <ast.ListComp object at 0x7da1b24e1e10>
variable[imag] assign[=] list[[]]
for taget[name[c]] in starred[name[complx]] begin[:]
if compare[binary_operation[constant[1] / name[c]].imag equal[==] constant[0]] begin[:]
call[name[imag].append, parameter[name[math].nan]]
return[tuple[[<ast.Name object at 0x7da1b2218d30>, <ast.Name object at 0x7da1b2248100>]]] | keyword[def] identifier[calc_complex] ( identifier[mag] , identifier[pha] ):
literal[string]
identifier[complx] =[ literal[int] ** identifier[m] * identifier[math] . identifier[e] **( literal[int] * identifier[p] / literal[int] ) keyword[for] identifier[m] , identifier[p] keyword[in] identifier[zip] ( identifier[mag] , identifier[pha] )]
identifier[real] =[ identifier[math] . identifier[log10] (( literal[int] / identifier[c] ). identifier[real] ) keyword[for] identifier[c] keyword[in] identifier[complx] ]
identifier[imag] =[]
keyword[for] identifier[c] keyword[in] identifier[complx] :
keyword[if] (( literal[int] / identifier[c] ). identifier[imag] )== literal[int] :
identifier[imag] . identifier[append] ( identifier[math] . identifier[nan] )
keyword[else] :
identifier[i] = identifier[math] . identifier[log10] ( identifier[abs] (( literal[int] / identifier[c] ). identifier[imag] ))
identifier[imag] . identifier[append] ( identifier[i] )
keyword[return] identifier[real] , identifier[imag] | def calc_complex(mag, pha):
""" Calculate real and imaginary part of the complex conductivity from
magnitude and phase in log10.
"""
complx = [10 ** m * math.e ** (1j * p / 1000.0) for (m, p) in zip(mag, pha)]
real = [math.log10((1 / c).real) for c in complx]
imag = []
for c in complx:
if (1 / c).imag == 0:
imag.append(math.nan) # depends on [control=['if'], data=[]]
else:
i = math.log10(abs((1 / c).imag))
imag.append(i) # depends on [control=['for'], data=['c']]
return (real, imag) |
def _cholesky(self, A, **kwargs):
    """
    Cholesky-factor ``A``, escalating L2 regularization on failure.

    Repeatedly attempts a Cholesky decomposition; whenever the
    factorization reports a non-positive-definite matrix, the L2 ridge
    added to A's diagonal is increased by a factor of 10, up to
    ``self._constraint_l2_max``.  On success, the regularization level
    that worked is stored back on ``self._constraint_l2``.

    Parameters
    ----------
    A : np.array
        Matrix to factor; dense or scipy-sparse.
        NOTE(review): A is modified in place while retrying, and is left
        with the last (failed) ridge term still added if every attempt
        fails -- confirm callers do not reuse A afterwards.
    **kwargs
        Passed through to ``cholesky``.

    Returns
    -------
    np.array
        Cholesky factor of the (possibly regularized) matrix.

    Raises
    ------
    NotPositiveDefiniteError
        If no regularization level up to the maximum makes A factorable.
    """
    # create appropriate-size diagonal matrix (sparse iff A is sparse)
    if sp.sparse.issparse(A):
        diag = sp.sparse.eye(A.shape[0])
    else:
        diag = np.eye(A.shape[0])
    constraint_l2 = self._constraint_l2
    while constraint_l2 <= self._constraint_l2_max:
        try:
            L = cholesky(A, **kwargs)
            # remember the regularization strength that succeeded
            self._constraint_l2 = constraint_l2
            return L
        except NotPositiveDefiniteError:
            if self.verbose:
                warnings.warn('Matrix is not positive definite. \n'\
                              'Increasing l2 reg by factor of 10.',
                              stacklevel=2)
            # swap the old ridge term for one 10x stronger, in place
            A -= constraint_l2 * diag
            constraint_l2 *= 10
            A += constraint_l2 * diag
    raise NotPositiveDefiniteError('Matrix is not positive \n'
                                   'definite.')
constant[
method to handle potential problems with the cholesky decomposition.
will try to increase L2 regularization of the penalty matrix to
do away with non-positive-definite errors
Parameters
----------
A : np.array
Returns
-------
np.array
]
if call[name[sp].sparse.issparse, parameter[name[A]]] begin[:]
variable[diag] assign[=] call[name[sp].sparse.eye, parameter[call[name[A].shape][constant[0]]]]
variable[constraint_l2] assign[=] name[self]._constraint_l2
while compare[name[constraint_l2] less_or_equal[<=] name[self]._constraint_l2_max] begin[:]
<ast.Try object at 0x7da20c6ab610>
<ast.Raise object at 0x7da20e963a30> | keyword[def] identifier[_cholesky] ( identifier[self] , identifier[A] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[sp] . identifier[sparse] . identifier[issparse] ( identifier[A] ):
identifier[diag] = identifier[sp] . identifier[sparse] . identifier[eye] ( identifier[A] . identifier[shape] [ literal[int] ])
keyword[else] :
identifier[diag] = identifier[np] . identifier[eye] ( identifier[A] . identifier[shape] [ literal[int] ])
identifier[constraint_l2] = identifier[self] . identifier[_constraint_l2]
keyword[while] identifier[constraint_l2] <= identifier[self] . identifier[_constraint_l2_max] :
keyword[try] :
identifier[L] = identifier[cholesky] ( identifier[A] ,** identifier[kwargs] )
identifier[self] . identifier[_constraint_l2] = identifier[constraint_l2]
keyword[return] identifier[L]
keyword[except] identifier[NotPositiveDefiniteError] :
keyword[if] identifier[self] . identifier[verbose] :
identifier[warnings] . identifier[warn] ( literal[string] literal[string] ,
identifier[stacklevel] = literal[int] )
identifier[A] -= identifier[constraint_l2] * identifier[diag]
identifier[constraint_l2] *= literal[int]
identifier[A] += identifier[constraint_l2] * identifier[diag]
keyword[raise] identifier[NotPositiveDefiniteError] ( literal[string]
literal[string] ) | def _cholesky(self, A, **kwargs):
"""
method to handle potential problems with the cholesky decomposition.
will try to increase L2 regularization of the penalty matrix to
do away with non-positive-definite errors
Parameters
----------
A : np.array
Returns
-------
np.array
"""
# create appropriate-size diagonal matrix
if sp.sparse.issparse(A):
diag = sp.sparse.eye(A.shape[0]) # depends on [control=['if'], data=[]]
else:
diag = np.eye(A.shape[0])
constraint_l2 = self._constraint_l2
while constraint_l2 <= self._constraint_l2_max:
try:
L = cholesky(A, **kwargs)
self._constraint_l2 = constraint_l2
return L # depends on [control=['try'], data=[]]
except NotPositiveDefiniteError:
if self.verbose:
warnings.warn('Matrix is not positive definite. \nIncreasing l2 reg by factor of 10.', stacklevel=2) # depends on [control=['if'], data=[]]
A -= constraint_l2 * diag
constraint_l2 *= 10
A += constraint_l2 * diag # depends on [control=['except'], data=[]] # depends on [control=['while'], data=['constraint_l2']]
raise NotPositiveDefiniteError('Matrix is not positive \ndefinite.') |
def setVisible(self, state):
    """
    Shows or hides this dialog, focusing the name field when shown.

    :param state | <bool>
    """
    super(XViewProfileDialog, self).setVisible(state)
    if not state:
        return
    # When the dialog becomes visible, bring it forward and prime the
    # name field for immediate typing.
    self.activateWindow()
    self.uiNameTXT.setFocus()
    self.uiNameTXT.selectAll()
constant[
Handles the visibility operation for this dialog.
:param state | <bool>
]
call[call[name[super], parameter[name[XViewProfileDialog], name[self]]].setVisible, parameter[name[state]]]
if name[state] begin[:]
call[name[self].activateWindow, parameter[]]
call[name[self].uiNameTXT.setFocus, parameter[]]
call[name[self].uiNameTXT.selectAll, parameter[]] | keyword[def] identifier[setVisible] ( identifier[self] , identifier[state] ):
literal[string]
identifier[super] ( identifier[XViewProfileDialog] , identifier[self] ). identifier[setVisible] ( identifier[state] )
keyword[if] ( identifier[state] ):
identifier[self] . identifier[activateWindow] ()
identifier[self] . identifier[uiNameTXT] . identifier[setFocus] ()
identifier[self] . identifier[uiNameTXT] . identifier[selectAll] () | def setVisible(self, state):
"""
Handles the visibility operation for this dialog.
:param state | <bool>
"""
super(XViewProfileDialog, self).setVisible(state)
if state:
self.activateWindow()
self.uiNameTXT.setFocus()
self.uiNameTXT.selectAll() # depends on [control=['if'], data=[]] |
def from_dict(cls, d):
    """
    Reconstructs the StructureEnvironments object from a dict representation of the StructureEnvironments created
    using the as_dict method.
    :param d: dict representation of the StructureEnvironments object
    :return: StructureEnvironments object
    """
    # Rebuild the per-site chemical-environments list.  Both the per-site
    # entry and each per-coordination entry may have been serialized as
    # None or the string 'None'; those round-trip back to None.  The
    # int(cn) conversions undo JSON's string-keyed dicts.
    ce_list = [None if (ce_dict == 'None' or ce_dict is None) else {
        int(cn): [None if (ced is None or ced == 'None') else
                  ChemicalEnvironments.from_dict(ced) for ced in ce_dict[cn]]
        for cn in ce_dict} for ce_dict in d['ce_list']]
    voronoi = DetailedVoronoiContainer.from_dict(d['voronoi'])
    structure = Structure.from_dict(d['structure'])
    # Per-site neighbors sets, keyed by coordination number; a site with
    # no sets is stored (and restored) as None.
    neighbors_sets = [{int(cn): [cls.NeighborsSet.from_dict(dd=nb_set_dict,
                                                            structure=structure,
                                                            detailed_voronoi=voronoi)
                                 for nb_set_dict in nb_sets]
                       for cn, nb_sets in site_nbs_sets_dict.items()}
                      if site_nbs_sets_dict is not None else None
                      for site_nbs_sets_dict in d['neighbors_sets']]
    # Copy everything except 'sites_info', which needs its own integer
    # key fix-up below.
    info = {key: val for key, val in d['info'].items() if key not in ['sites_info']}
    if 'sites_info' in d['info']:
        # Restore int keys at both nesting levels of nb_sets_info; site
        # entries without nb_sets_info collapse to an empty dict.
        info['sites_info'] = [{'nb_sets_info': {int(cn): {int(inb_set): nb_set_info
                                                          for inb_set, nb_set_info in cn_sets.items()}
                                                for cn, cn_sets in site_info['nb_sets_info'].items()},
                               'time': site_info['time']} if 'nb_sets_info' in site_info else {}
                              for site_info in d['info']['sites_info']]
    return cls(voronoi=voronoi, valences=d['valences'],
               sites_map=d['sites_map'],
               equivalent_sites=[[PeriodicSite.from_dict(psd) for psd in psl] for psl in d['equivalent_sites']],
               ce_list=ce_list, structure=structure,
               neighbors_sets=neighbors_sets,
               info=info)
constant[
Reconstructs the StructureEnvironments object from a dict representation of the StructureEnvironments created
using the as_dict method.
:param d: dict representation of the StructureEnvironments object
:return: StructureEnvironments object
]
variable[ce_list] assign[=] <ast.ListComp object at 0x7da207f98340>
variable[voronoi] assign[=] call[name[DetailedVoronoiContainer].from_dict, parameter[call[name[d]][constant[voronoi]]]]
variable[structure] assign[=] call[name[Structure].from_dict, parameter[call[name[d]][constant[structure]]]]
variable[neighbors_sets] assign[=] <ast.ListComp object at 0x7da20c991ab0>
variable[info] assign[=] <ast.DictComp object at 0x7da20c9905e0>
if compare[constant[sites_info] in call[name[d]][constant[info]]] begin[:]
call[name[info]][constant[sites_info]] assign[=] <ast.ListComp object at 0x7da204344d90>
return[call[name[cls], parameter[]]] | keyword[def] identifier[from_dict] ( identifier[cls] , identifier[d] ):
literal[string]
identifier[ce_list] =[ keyword[None] keyword[if] ( identifier[ce_dict] == literal[string] keyword[or] identifier[ce_dict] keyword[is] keyword[None] ) keyword[else] {
identifier[int] ( identifier[cn] ):[ keyword[None] keyword[if] ( identifier[ced] keyword[is] keyword[None] keyword[or] identifier[ced] == literal[string] ) keyword[else]
identifier[ChemicalEnvironments] . identifier[from_dict] ( identifier[ced] ) keyword[for] identifier[ced] keyword[in] identifier[ce_dict] [ identifier[cn] ]]
keyword[for] identifier[cn] keyword[in] identifier[ce_dict] } keyword[for] identifier[ce_dict] keyword[in] identifier[d] [ literal[string] ]]
identifier[voronoi] = identifier[DetailedVoronoiContainer] . identifier[from_dict] ( identifier[d] [ literal[string] ])
identifier[structure] = identifier[Structure] . identifier[from_dict] ( identifier[d] [ literal[string] ])
identifier[neighbors_sets] =[{ identifier[int] ( identifier[cn] ):[ identifier[cls] . identifier[NeighborsSet] . identifier[from_dict] ( identifier[dd] = identifier[nb_set_dict] ,
identifier[structure] = identifier[structure] ,
identifier[detailed_voronoi] = identifier[voronoi] )
keyword[for] identifier[nb_set_dict] keyword[in] identifier[nb_sets] ]
keyword[for] identifier[cn] , identifier[nb_sets] keyword[in] identifier[site_nbs_sets_dict] . identifier[items] ()}
keyword[if] identifier[site_nbs_sets_dict] keyword[is] keyword[not] keyword[None] keyword[else] keyword[None]
keyword[for] identifier[site_nbs_sets_dict] keyword[in] identifier[d] [ literal[string] ]]
identifier[info] ={ identifier[key] : identifier[val] keyword[for] identifier[key] , identifier[val] keyword[in] identifier[d] [ literal[string] ]. identifier[items] () keyword[if] identifier[key] keyword[not] keyword[in] [ literal[string] ]}
keyword[if] literal[string] keyword[in] identifier[d] [ literal[string] ]:
identifier[info] [ literal[string] ]=[{ literal[string] :{ identifier[int] ( identifier[cn] ):{ identifier[int] ( identifier[inb_set] ): identifier[nb_set_info]
keyword[for] identifier[inb_set] , identifier[nb_set_info] keyword[in] identifier[cn_sets] . identifier[items] ()}
keyword[for] identifier[cn] , identifier[cn_sets] keyword[in] identifier[site_info] [ literal[string] ]. identifier[items] ()},
literal[string] : identifier[site_info] [ literal[string] ]} keyword[if] literal[string] keyword[in] identifier[site_info] keyword[else] {}
keyword[for] identifier[site_info] keyword[in] identifier[d] [ literal[string] ][ literal[string] ]]
keyword[return] identifier[cls] ( identifier[voronoi] = identifier[voronoi] , identifier[valences] = identifier[d] [ literal[string] ],
identifier[sites_map] = identifier[d] [ literal[string] ],
identifier[equivalent_sites] =[[ identifier[PeriodicSite] . identifier[from_dict] ( identifier[psd] ) keyword[for] identifier[psd] keyword[in] identifier[psl] ] keyword[for] identifier[psl] keyword[in] identifier[d] [ literal[string] ]],
identifier[ce_list] = identifier[ce_list] , identifier[structure] = identifier[structure] ,
identifier[neighbors_sets] = identifier[neighbors_sets] ,
identifier[info] = identifier[info] ) | def from_dict(cls, d):
"""
Reconstructs the StructureEnvironments object from a dict representation of the StructureEnvironments created
using the as_dict method.
:param d: dict representation of the StructureEnvironments object
:return: StructureEnvironments object
"""
ce_list = [None if ce_dict == 'None' or ce_dict is None else {int(cn): [None if ced is None or ced == 'None' else ChemicalEnvironments.from_dict(ced) for ced in ce_dict[cn]] for cn in ce_dict} for ce_dict in d['ce_list']]
voronoi = DetailedVoronoiContainer.from_dict(d['voronoi'])
structure = Structure.from_dict(d['structure'])
neighbors_sets = [{int(cn): [cls.NeighborsSet.from_dict(dd=nb_set_dict, structure=structure, detailed_voronoi=voronoi) for nb_set_dict in nb_sets] for (cn, nb_sets) in site_nbs_sets_dict.items()} if site_nbs_sets_dict is not None else None for site_nbs_sets_dict in d['neighbors_sets']]
info = {key: val for (key, val) in d['info'].items() if key not in ['sites_info']}
if 'sites_info' in d['info']:
info['sites_info'] = [{'nb_sets_info': {int(cn): {int(inb_set): nb_set_info for (inb_set, nb_set_info) in cn_sets.items()} for (cn, cn_sets) in site_info['nb_sets_info'].items()}, 'time': site_info['time']} if 'nb_sets_info' in site_info else {} for site_info in d['info']['sites_info']] # depends on [control=['if'], data=[]]
return cls(voronoi=voronoi, valences=d['valences'], sites_map=d['sites_map'], equivalent_sites=[[PeriodicSite.from_dict(psd) for psd in psl] for psl in d['equivalent_sites']], ce_list=ce_list, structure=structure, neighbors_sets=neighbors_sets, info=info) |
def calcDeviationLimits(value, tolerance, mode):
    """Returns the upper and lower deviation limits for a value and a given
    tolerance, either as relative or a absolute difference.
    :param value: can be a single value or a list of values if a list of values
        is given, the minimal value will be used to calculate the lower limit
        and the maximum value to calculate the upper limit
    :param tolerance: a number used to calculate the limits
    :param mode: either ``absolute`` or ``relative``, specifies how the
        ``tolerance`` should be applied to the ``value``.
    :returns: tuple ``(lowerLimit, upperLimit)``
    :raises ValueError: if ``mode`` is neither ``"relative"`` nor ``"absolute"``
    """
    values = toList(value)
    if mode == 'relative':
        # Scale the extremes by (1 -/+ tolerance).
        lowerLimit = min(values) * (1 - tolerance)
        upperLimit = max(values) * (1 + tolerance)
    elif mode == 'absolute':
        # Shift the extremes by -/+ tolerance.
        lowerLimit = min(values) - tolerance
        upperLimit = max(values) + tolerance
    else:
        # BUG FIX: the original referenced an undefined name ``filepath``
        # here, so an invalid mode raised NameError instead of reporting
        # the offending value.
        raise ValueError('mode %s not specified' % (mode, ))
    return lowerLimit, upperLimit
constant[Returns the upper and lower deviation limits for a value and a given
tolerance, either as relative or a absolute difference.
:param value: can be a single value or a list of values if a list of values
is given, the minimal value will be used to calculate the lower limit
and the maximum value to calculate the upper limit
:param tolerance: a number used to calculate the limits
:param mode: either ``absolute`` or ``relative``, specifies how the
``tolerance`` should be applied to the ``value``.
]
variable[values] assign[=] call[name[toList], parameter[name[value]]]
if compare[name[mode] equal[==] constant[relative]] begin[:]
variable[lowerLimit] assign[=] binary_operation[call[name[min], parameter[name[values]]] * binary_operation[constant[1] - name[tolerance]]]
variable[upperLimit] assign[=] binary_operation[call[name[max], parameter[name[values]]] * binary_operation[constant[1] + name[tolerance]]]
return[tuple[[<ast.Name object at 0x7da1b28ac190>, <ast.Name object at 0x7da1b28ac5e0>]]] | keyword[def] identifier[calcDeviationLimits] ( identifier[value] , identifier[tolerance] , identifier[mode] ):
literal[string]
identifier[values] = identifier[toList] ( identifier[value] )
keyword[if] identifier[mode] == literal[string] :
identifier[lowerLimit] = identifier[min] ( identifier[values] )*( literal[int] - identifier[tolerance] )
identifier[upperLimit] = identifier[max] ( identifier[values] )*( literal[int] + identifier[tolerance] )
keyword[elif] identifier[mode] == literal[string] :
identifier[lowerLimit] = identifier[min] ( identifier[values] )- identifier[tolerance]
identifier[upperLimit] = identifier[max] ( identifier[values] )+ identifier[tolerance]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] %( identifier[filepath] ,))
keyword[return] identifier[lowerLimit] , identifier[upperLimit] | def calcDeviationLimits(value, tolerance, mode):
"""Returns the upper and lower deviation limits for a value and a given
tolerance, either as relative or a absolute difference.
:param value: can be a single value or a list of values if a list of values
is given, the minimal value will be used to calculate the lower limit
and the maximum value to calculate the upper limit
:param tolerance: a number used to calculate the limits
:param mode: either ``absolute`` or ``relative``, specifies how the
``tolerance`` should be applied to the ``value``.
"""
values = toList(value)
if mode == 'relative':
lowerLimit = min(values) * (1 - tolerance)
upperLimit = max(values) * (1 + tolerance) # depends on [control=['if'], data=[]]
elif mode == 'absolute':
lowerLimit = min(values) - tolerance
upperLimit = max(values) + tolerance # depends on [control=['if'], data=[]]
else:
raise Exception('mode %s not specified' % (filepath,))
return (lowerLimit, upperLimit) |
def _run(self, keep_successfull):
        """Interpret the parsed 010 AST and return the resulting DOM.
        :param keep_successfull: when truthy, an exception raised mid-parse is
            stored on the (partial) root as ``_pfp__error`` and the root is
            returned anyway; otherwise the error is re-raised as a PfpError.
        :returns: PfpDom
        """
        # example self._ast.show():
        # FileAST:
        # Decl: data, [], [], []
        # TypeDecl: data, []
        # Struct: DATA
        # Decl: a, [], [], []
        # TypeDecl: a, []
        # IdentifierType: ['char']
        # Decl: b, [], [], []
        # TypeDecl: b, []
        # IdentifierType: ['char']
        # Decl: c, [], [], []
        # TypeDecl: c, []
        # IdentifierType: ['char']
        # Decl: d, [], [], []
        # TypeDecl: d, []
        # IdentifierType: ['char']
        self._dlog("interpreting template")
        try:
            # it is important to pass the stream in as the stream
            # may change (e.g. compressed data)
            res = self._handle_node(self._ast, None, None, self._stream)
        except errors.InterpReturn as e:
            # TODO handle exit/return codes (e.g. return -1)
            # A top-level 'return' in the template ends interpretation; the
            # DOM built so far is the result.
            res = self._root
        except errors.InterpExit as e:
            # Explicit exit from the template; same as return above.
            res = self._root
        except Exception as e:
            if keep_successfull:
                # return the root and set _pfp__error
                res = self._root
                res._pfp__error = e
            else:
                # Re-raise as a PfpError, appending the template filename and
                # the source line of the failing node; six.reraise preserves
                # the original traceback.
                exc_type, exc_obj, traceback = sys.exc_info()
                more_info = "\nException at {}:{}".format(
                    self._orig_filename,
                    self._coord.line
                )
                six.reraise(
                    errors.PfpError,
                    errors.PfpError(exc_obj.__class__.__name__ + ": " + exc_obj.args[0] + more_info if len(exc_obj.args) > 0 else more_info),
                    traceback
                )
        # final drop-in after everything has executed
        if self._break_type != self.BREAK_NONE:
            self.debugger.cmdloop("execution finished")
        # Attach the user-defined types discovered during interpretation.
        types = self.get_types()
        res._pfp__types = types
        return res
constant[Interpret the parsed 010 AST
:returns: PfpDom
]
call[name[self]._dlog, parameter[constant[interpreting template]]]
<ast.Try object at 0x7da1b0fde440>
if compare[name[self]._break_type not_equal[!=] name[self].BREAK_NONE] begin[:]
call[name[self].debugger.cmdloop, parameter[constant[execution finished]]]
variable[types] assign[=] call[name[self].get_types, parameter[]]
name[res]._pfp__types assign[=] name[types]
return[name[res]] | keyword[def] identifier[_run] ( identifier[self] , identifier[keep_successfull] ):
literal[string]
identifier[self] . identifier[_dlog] ( literal[string] )
keyword[try] :
identifier[res] = identifier[self] . identifier[_handle_node] ( identifier[self] . identifier[_ast] , keyword[None] , keyword[None] , identifier[self] . identifier[_stream] )
keyword[except] identifier[errors] . identifier[InterpReturn] keyword[as] identifier[e] :
identifier[res] = identifier[self] . identifier[_root]
keyword[except] identifier[errors] . identifier[InterpExit] keyword[as] identifier[e] :
identifier[res] = identifier[self] . identifier[_root]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] identifier[keep_successfull] :
identifier[res] = identifier[self] . identifier[_root]
identifier[res] . identifier[_pfp__error] = identifier[e]
keyword[else] :
identifier[exc_type] , identifier[exc_obj] , identifier[traceback] = identifier[sys] . identifier[exc_info] ()
identifier[more_info] = literal[string] . identifier[format] (
identifier[self] . identifier[_orig_filename] ,
identifier[self] . identifier[_coord] . identifier[line]
)
identifier[six] . identifier[reraise] (
identifier[errors] . identifier[PfpError] ,
identifier[errors] . identifier[PfpError] ( identifier[exc_obj] . identifier[__class__] . identifier[__name__] + literal[string] + identifier[exc_obj] . identifier[args] [ literal[int] ]+ identifier[more_info] keyword[if] identifier[len] ( identifier[exc_obj] . identifier[args] )> literal[int] keyword[else] identifier[more_info] ),
identifier[traceback]
)
keyword[if] identifier[self] . identifier[_break_type] != identifier[self] . identifier[BREAK_NONE] :
identifier[self] . identifier[debugger] . identifier[cmdloop] ( literal[string] )
identifier[types] = identifier[self] . identifier[get_types] ()
identifier[res] . identifier[_pfp__types] = identifier[types]
keyword[return] identifier[res] | def _run(self, keep_successfull):
"""Interpret the parsed 010 AST
:returns: PfpDom
"""
# example self._ast.show():
# FileAST:
# Decl: data, [], [], []
# TypeDecl: data, []
# Struct: DATA
# Decl: a, [], [], []
# TypeDecl: a, []
# IdentifierType: ['char']
# Decl: b, [], [], []
# TypeDecl: b, []
# IdentifierType: ['char']
# Decl: c, [], [], []
# TypeDecl: c, []
# IdentifierType: ['char']
# Decl: d, [], [], []
# TypeDecl: d, []
# IdentifierType: ['char']
self._dlog('interpreting template')
try:
# it is important to pass the stream in as the stream
# may change (e.g. compressed data)
res = self._handle_node(self._ast, None, None, self._stream) # depends on [control=['try'], data=[]]
except errors.InterpReturn as e:
# TODO handle exit/return codes (e.g. return -1)
res = self._root # depends on [control=['except'], data=[]]
except errors.InterpExit as e:
res = self._root # depends on [control=['except'], data=[]]
except Exception as e:
if keep_successfull:
# return the root and set _pfp__error
res = self._root
res._pfp__error = e # depends on [control=['if'], data=[]]
else:
(exc_type, exc_obj, traceback) = sys.exc_info()
more_info = '\nException at {}:{}'.format(self._orig_filename, self._coord.line)
six.reraise(errors.PfpError, errors.PfpError(exc_obj.__class__.__name__ + ': ' + exc_obj.args[0] + more_info if len(exc_obj.args) > 0 else more_info), traceback) # depends on [control=['except'], data=['e']]
# final drop-in after everything has executed
if self._break_type != self.BREAK_NONE:
self.debugger.cmdloop('execution finished') # depends on [control=['if'], data=[]]
types = self.get_types()
res._pfp__types = types
return res |
def _check_pillar_minions(self, expr, delimiter, greedy):
'''
Return the minions found by looking via pillar
'''
return self._check_cache_minions(expr, delimiter, greedy, 'pillar') | def function[_check_pillar_minions, parameter[self, expr, delimiter, greedy]]:
constant[
Return the minions found by looking via pillar
]
return[call[name[self]._check_cache_minions, parameter[name[expr], name[delimiter], name[greedy], constant[pillar]]]] | keyword[def] identifier[_check_pillar_minions] ( identifier[self] , identifier[expr] , identifier[delimiter] , identifier[greedy] ):
literal[string]
keyword[return] identifier[self] . identifier[_check_cache_minions] ( identifier[expr] , identifier[delimiter] , identifier[greedy] , literal[string] ) | def _check_pillar_minions(self, expr, delimiter, greedy):
"""
Return the minions found by looking via pillar
"""
return self._check_cache_minions(expr, delimiter, greedy, 'pillar') |
def encode(data):
    '''
    Encode raw address bytes as a prefixed cashaddr string.
    bytes -> str
    '''
    prefix = riemann.network.CASHADDR_PREFIX
    if prefix is None:
        raise ValueError('Network {} does not support cashaddresses.'
                         .format(riemann.get_current_network_name()))
    # Regroup the 8-bit payload into 5-bit symbols before checksumming.
    regrouped = convertbits(data, 8, 5)
    checksum = calculate_checksum(prefix, regrouped)
    payload = b32encode(regrouped + checksum)
    return '{prefix}:{payload}'.format(prefix=prefix, payload=payload)
constant[
bytes -> str
]
if compare[name[riemann].network.CASHADDR_PREFIX is constant[None]] begin[:]
<ast.Raise object at 0x7da1b05454e0>
variable[data] assign[=] call[name[convertbits], parameter[name[data], constant[8], constant[5]]]
variable[checksum] assign[=] call[name[calculate_checksum], parameter[name[riemann].network.CASHADDR_PREFIX, name[data]]]
variable[payload] assign[=] call[name[b32encode], parameter[binary_operation[name[data] + name[checksum]]]]
variable[form] assign[=] constant[{prefix}:{payload}]
return[call[name[form].format, parameter[]]] | keyword[def] identifier[encode] ( identifier[data] ):
literal[string]
keyword[if] identifier[riemann] . identifier[network] . identifier[CASHADDR_PREFIX] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string]
. identifier[format] ( identifier[riemann] . identifier[get_current_network_name] ()))
identifier[data] = identifier[convertbits] ( identifier[data] , literal[int] , literal[int] )
identifier[checksum] = identifier[calculate_checksum] ( identifier[riemann] . identifier[network] . identifier[CASHADDR_PREFIX] , identifier[data] )
identifier[payload] = identifier[b32encode] ( identifier[data] + identifier[checksum] )
identifier[form] = literal[string]
keyword[return] identifier[form] . identifier[format] (
identifier[prefix] = identifier[riemann] . identifier[network] . identifier[CASHADDR_PREFIX] ,
identifier[payload] = identifier[payload] ) | def encode(data):
"""
bytes -> str
"""
if riemann.network.CASHADDR_PREFIX is None:
raise ValueError('Network {} does not support cashaddresses.'.format(riemann.get_current_network_name())) # depends on [control=['if'], data=[]]
data = convertbits(data, 8, 5)
checksum = calculate_checksum(riemann.network.CASHADDR_PREFIX, data)
payload = b32encode(data + checksum)
form = '{prefix}:{payload}'
return form.format(prefix=riemann.network.CASHADDR_PREFIX, payload=payload) |
def _get_all_file_versions(self, secure_data_path, limit=None):
"""
Convenience function that returns a generator yielding the contents of all versions of
a file and its version info
secure_data_path -- full path to the file in the safety deposit box
limit -- Default(100), limits how many records to be returned from the api at once.
"""
for secret in self._get_all_file_version_ids(secure_data_path, limit):
yield {'secret': self.get_file_data(secure_data_path, version=secret['id']),
'version': secret} | def function[_get_all_file_versions, parameter[self, secure_data_path, limit]]:
constant[
Convenience function that returns a generator yielding the contents of all versions of
a file and its version info
secure_data_path -- full path to the file in the safety deposit box
limit -- Default(100), limits how many records to be returned from the api at once.
]
for taget[name[secret]] in starred[call[name[self]._get_all_file_version_ids, parameter[name[secure_data_path], name[limit]]]] begin[:]
<ast.Yield object at 0x7da1b04a4e20> | keyword[def] identifier[_get_all_file_versions] ( identifier[self] , identifier[secure_data_path] , identifier[limit] = keyword[None] ):
literal[string]
keyword[for] identifier[secret] keyword[in] identifier[self] . identifier[_get_all_file_version_ids] ( identifier[secure_data_path] , identifier[limit] ):
keyword[yield] { literal[string] : identifier[self] . identifier[get_file_data] ( identifier[secure_data_path] , identifier[version] = identifier[secret] [ literal[string] ]),
literal[string] : identifier[secret] } | def _get_all_file_versions(self, secure_data_path, limit=None):
"""
Convenience function that returns a generator yielding the contents of all versions of
a file and its version info
secure_data_path -- full path to the file in the safety deposit box
limit -- Default(100), limits how many records to be returned from the api at once.
"""
for secret in self._get_all_file_version_ids(secure_data_path, limit):
yield {'secret': self.get_file_data(secure_data_path, version=secret['id']), 'version': secret} # depends on [control=['for'], data=['secret']] |
def _set_scores(self):
        """
        Compute anomaly scores for the time series by sliding both lagging window and future window.
        """
        # Build the SAX representation and its chunk dictionaries first; the
        # scoring below relies on them.
        self._generate_SAX()
        self._construct_all_SAX_chunk_dict()
        lag_size = self.lag_window_size
        future_size = self.future_window_size
        last_scorable = self.time_series_length - future_size
        raw_scores = {}
        for index, timestamp in enumerate(self.time_series.timestamps):
            if lag_size <= index <= last_scorable:
                raw_scores[timestamp] = self._compute_anom_score_between_two_windows(index)
            else:
                # Not enough history (or future) for a full window: score 0.
                raw_scores[timestamp] = 0
        self.anom_scores = TimeSeries(self._denoise_scores(raw_scores))
constant[
Compute anomaly scores for the time series by sliding both lagging window and future window.
]
variable[anom_scores] assign[=] dictionary[[], []]
call[name[self]._generate_SAX, parameter[]]
call[name[self]._construct_all_SAX_chunk_dict, parameter[]]
variable[length] assign[=] name[self].time_series_length
variable[lws] assign[=] name[self].lag_window_size
variable[fws] assign[=] name[self].future_window_size
for taget[tuple[[<ast.Name object at 0x7da18eb54940>, <ast.Name object at 0x7da18eb57a60>]]] in starred[call[name[enumerate], parameter[name[self].time_series.timestamps]]] begin[:]
if <ast.BoolOp object at 0x7da18eb56710> begin[:]
call[name[anom_scores]][name[timestamp]] assign[=] constant[0]
name[self].anom_scores assign[=] call[name[TimeSeries], parameter[call[name[self]._denoise_scores, parameter[name[anom_scores]]]]] | keyword[def] identifier[_set_scores] ( identifier[self] ):
literal[string]
identifier[anom_scores] ={}
identifier[self] . identifier[_generate_SAX] ()
identifier[self] . identifier[_construct_all_SAX_chunk_dict] ()
identifier[length] = identifier[self] . identifier[time_series_length]
identifier[lws] = identifier[self] . identifier[lag_window_size]
identifier[fws] = identifier[self] . identifier[future_window_size]
keyword[for] identifier[i] , identifier[timestamp] keyword[in] identifier[enumerate] ( identifier[self] . identifier[time_series] . identifier[timestamps] ):
keyword[if] identifier[i] < identifier[lws] keyword[or] identifier[i] > identifier[length] - identifier[fws] :
identifier[anom_scores] [ identifier[timestamp] ]= literal[int]
keyword[else] :
identifier[anom_scores] [ identifier[timestamp] ]= identifier[self] . identifier[_compute_anom_score_between_two_windows] ( identifier[i] )
identifier[self] . identifier[anom_scores] = identifier[TimeSeries] ( identifier[self] . identifier[_denoise_scores] ( identifier[anom_scores] )) | def _set_scores(self):
"""
Compute anomaly scores for the time series by sliding both lagging window and future window.
"""
anom_scores = {}
self._generate_SAX()
self._construct_all_SAX_chunk_dict()
length = self.time_series_length
lws = self.lag_window_size
fws = self.future_window_size
for (i, timestamp) in enumerate(self.time_series.timestamps):
if i < lws or i > length - fws:
anom_scores[timestamp] = 0 # depends on [control=['if'], data=[]]
else:
anom_scores[timestamp] = self._compute_anom_score_between_two_windows(i) # depends on [control=['for'], data=[]]
self.anom_scores = TimeSeries(self._denoise_scores(anom_scores)) |
def parse(argv, level=0):
    """
    Parse sub-arguments between `[` and `]` recursively.
    Examples
    --------
    ```
    >>> argv = ['--foo', 'bar', '--buz', '[', 'qux', '--quux', 'corge', ']']
    >>> subarg.parse(argv)
    ['--foo', 'bar', '--buz', ['qux', '--quux', 'corge']]
    ```
    Parameters
    ----------
    argv : list of strings
        list of arguments strings like `sys.argv`.
    level : int
        current bracket nesting depth; callers normally leave this at 0.
    Returns
    -------
    nested list of arguments strings.
    Raises
    ------
    ValueError
        if a `]` appears without a matching opening `[`.
    """
    nargs = []
    index = 0  # start of the currently open outermost sub-list
    for i in range(len(argv)):
        if argv[i] == '[':
            level += 1
            if level == 1:
                # Remember where the outermost sub-list begins.
                index = i + 1
        elif argv[i] == ']':
            level -= 1
            if level < 0:
                # BUG FIX: the original fell through to an undefined slice
                # start (NameError) on an unmatched ']'; fail loudly instead.
                raise ValueError("unmatched ']' in arguments: %r" % (argv,))
            if level == 0:
                # Recurse only on the outermost closing bracket; inner
                # brackets are handled by the recursive call itself.
                nargs.append(parse(argv[index:i], level))
        elif level == 0:
            nargs.append(argv[i])
    return nargs
constant[
Parse sub-arguments between `[` and `]` recursively.
Examples
--------
```
>>> argv = ['--foo', 'bar', '--buz', '[', 'qux', '--quux', 'corge', ']']
>>> subarg.parse(argv)
['--foo', 'bar', '--buz', ['qux', '--quux', 'corge']]
```
Parameters
----------
argv : list of strings
list of arguments strings like `sys.argv`.
Returns
-------
nested list of arguments strings.
]
variable[nargs] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[argv]]]]]] begin[:]
if compare[call[name[argv]][name[i]] equal[==] constant[[]] begin[:]
<ast.AugAssign object at 0x7da204344a00>
if compare[name[level] equal[==] constant[1]] begin[:]
variable[index] assign[=] binary_operation[name[i] + constant[1]]
return[name[nargs]] | keyword[def] identifier[parse] ( identifier[argv] , identifier[level] = literal[int] ):
literal[string]
identifier[nargs] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[argv] )):
keyword[if] identifier[argv] [ identifier[i] ]== literal[string] :
identifier[level] += literal[int]
keyword[if] identifier[level] == literal[int] :
identifier[index] = identifier[i] + literal[int]
keyword[elif] identifier[argv] [ identifier[i] ]== literal[string] :
identifier[level] -= literal[int]
identifier[sub] = identifier[argv] [ identifier[index] : identifier[i] ]
keyword[if] identifier[level] == literal[int] :
identifier[nargs] . identifier[append] ( identifier[parse] ( identifier[sub] , identifier[level] ))
keyword[elif] identifier[level] == literal[int] :
identifier[nargs] . identifier[append] ( identifier[argv] [ identifier[i] ])
keyword[return] identifier[nargs] | def parse(argv, level=0):
"""
Parse sub-arguments between `[` and `]` recursively.
Examples
--------
```
>>> argv = ['--foo', 'bar', '--buz', '[', 'qux', '--quux', 'corge', ']']
>>> subarg.parse(argv)
['--foo', 'bar', '--buz', ['qux', '--quux', 'corge']]
```
Parameters
----------
argv : list of strings
list of arguments strings like `sys.argv`.
Returns
-------
nested list of arguments strings.
"""
nargs = []
for i in range(len(argv)):
if argv[i] == '[':
level += 1
if level == 1:
index = i + 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif argv[i] == ']':
level -= 1
sub = argv[index:i]
if level == 0:
nargs.append(parse(sub, level)) # depends on [control=['if'], data=['level']] # depends on [control=['if'], data=[]]
elif level == 0:
nargs.append(argv[i]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return nargs |
def file_map(path, instructions, **kw):
    '''
    Build a file-map object for the given path and instruction-set.
    file_map(path, instructions) yields a file-map object: a pimms immutable
    object that combines a format-spec for a directory (instructions) with a
    directory to yield a lazily-loaded data object. The format-spec is not
    currently documented, but interested users should see the variable
    neuropythy.hcp.files.hcp_filemap_instructions.
    file_map(None, instructions) is curried: it yields a function of exactly
    one argument equivalent to lambda p: file_map(p, instructions).
    The following options can be given:
      * path_parameters (default: None) a map of parameters used to format
        the filenames in the instructions.
      * data_hierarchy (default: None) how the data should be nested; see the
        variable neuropythy.hcp.files.hcp_filemap_data_hierarchy.
      * load_function (default: None) the function used to load filenames;
        if None then neuropythy.io.load is used.
      * meta_data (default: None) passed on to the FileMap object.
    Any additional keyword arguments are used as supplemental paths.
    '''
    if not path:
        # No path yet: defer construction until one is supplied.
        return lambda p: file_map(p, instructions, **kw)
    return FileMap(path, instructions, **kw)
constant[
file_map(path, instructions) yields a file-map object for the given path and instruction-set.
file_map(None, instructions) yields a lambda of exactly one argument that is equivalent to the
following: lambda p: file_map(p, instructions)
File-map objects are pimms immutable objects that combine a format-spec for a directory
(instructions) with a directory to yield a lazily-loaded data object. The format-spec is not
currently documented, but interested users should see the variable
neuropythy.hcp.files.hcp_filemap_instructions.
The following options can be given:
* path_parameters (default: None) may be set to a map of parameters that are used to format the
filenames in the instructions.
* data_hierarchy (default: None) may specify how the data should be nested; see the variable
neuropythy.hcp.files.hcp_filemap_data_hierarchy.
* load_function (default: None) may specify the function that is used to load filenames; if
None then neuropythy.io.load is used.
* meta_data (default: None) may be passed on to the FileMap object.
Any additional keyword arguments given to the file_map function will be used as supplemental
paths.
]
if name[path] begin[:]
return[call[name[FileMap], parameter[name[path], name[instructions]]]] | keyword[def] identifier[file_map] ( identifier[path] , identifier[instructions] ,** identifier[kw] ):
literal[string]
keyword[if] identifier[path] : keyword[return] identifier[FileMap] ( identifier[path] , identifier[instructions] ,** identifier[kw] )
keyword[else] : keyword[return] keyword[lambda] identifier[path] : identifier[file_map] ( identifier[path] , identifier[instructions] ,** identifier[kw] ) | def file_map(path, instructions, **kw):
"""
file_map(path, instructions) yields a file-map object for the given path and instruction-set.
file_map(None, instructions) yields a lambda of exactly one argument that is equivalent to the
following: lambda p: file_map(p, instructions)
File-map objects are pimms immutable objects that combine a format-spec for a directory
(instructions) with a directory to yield a lazily-loaded data object. The format-spec is not
currently documented, but interested users should see the variable
neuropythy.hcp.files.hcp_filemap_instructions.
The following options can be given:
* path_parameters (default: None) may be set to a map of parameters that are used to format the
filenames in the instructions.
* data_hierarchy (default: None) may specify how the data should be nested; see the variable
neuropythy.hcp.files.hcp_filemap_data_hierarchy.
* load_function (default: None) may specify the function that is used to load filenames; if
None then neuropythy.io.load is used.
* meta_data (default: None) may be passed on to the FileMap object.
Any additional keyword arguments given to the file_map function will be used as supplemental
paths.
"""
if path:
return FileMap(path, instructions, **kw) # depends on [control=['if'], data=[]]
else:
return lambda path: file_map(path, instructions, **kw) |
def create(self, instance, parameters, existing):
    """ Create the instance

    Args:
        instance (AtlasServiceInstance.Instance): Existing or New instance
        parameters (dict): Parameters for the instance
        existing (bool): Create an instance on an existing Atlas cluster

    Returns:
        ProvisionedServiceSpec: Status

    Raises:
        ErrInstanceAlreadyExists: If instance exists but with different parameters
        ErrClusterNotFound: Cluster does not exist
        NotImplementedError: If asked to create a brand new (non-existing) cluster.
    """
    if not instance.isProvisioned():
        # First provisioning: remember the requested parameters on the instance.
        # Set parameters
        instance.parameters = parameters
        # Existing cluster
        if existing and not self.backend.atlas.Clusters.is_existing_cluster(instance.parameters[self.backend.config.PARAMETER_CLUSTER]):
            # We need to use an existing cluster that is not available !
            raise ErrClusterNotFound(instance.parameters[self.backend.config.PARAMETER_CLUSTER])
        elif not existing:
            # We need to create a new cluster
            # We should not reach this code because the AtlasBroker.provision should
            # raise an ErrPlanUnsupported before.
            raise NotImplementedError()
        # Persist the instance; the storage result is echoed back in the spec.
        result = self.backend.storage.store(instance)
        # Provision done
        return ProvisionedServiceSpec(ProvisionState.SUCCESSFUL_CREATED,
                                      "",
                                      str(result))
    elif instance.parameters == parameters:
        # Same instance requested with identical parameters: idempotent no-op.
        # Identical so nothing to do
        return ProvisionedServiceSpec(ProvisionState.IDENTICAL_ALREADY_EXISTS,
                                      "",
                                      "duplicate")
    else:
        # Different parameters ...
        raise ErrInstanceAlreadyExists() | def function[create, parameter[self, instance, parameters, existing]]:
constant[ Create the instance
Args:
instance (AtlasServiceInstance.Instance): Existing or New instance
parameters (dict): Parameters for the instance
existing (bool): Create an instance on an existing Atlas cluster
Returns:
ProvisionedServiceSpec: Status
Raises:
ErrInstanceAlreadyExists: If instance exists but with different parameters
ErrClusterNotFound: Cluster does not exist
]
if <ast.UnaryOp object at 0x7da1b27143a0> begin[:]
name[instance].parameters assign[=] name[parameters]
if <ast.BoolOp object at 0x7da1b2716080> begin[:]
<ast.Raise object at 0x7da1b2714f70>
variable[result] assign[=] call[name[self].backend.storage.store, parameter[name[instance]]]
return[call[name[ProvisionedServiceSpec], parameter[name[ProvisionState].SUCCESSFUL_CREATED, constant[], call[name[str], parameter[name[result]]]]]] | keyword[def] identifier[create] ( identifier[self] , identifier[instance] , identifier[parameters] , identifier[existing] ):
literal[string]
keyword[if] keyword[not] identifier[instance] . identifier[isProvisioned] ():
identifier[instance] . identifier[parameters] = identifier[parameters]
keyword[if] identifier[existing] keyword[and] keyword[not] identifier[self] . identifier[backend] . identifier[atlas] . identifier[Clusters] . identifier[is_existing_cluster] ( identifier[instance] . identifier[parameters] [ identifier[self] . identifier[backend] . identifier[config] . identifier[PARAMETER_CLUSTER] ]):
keyword[raise] identifier[ErrClusterNotFound] ( identifier[instance] . identifier[parameters] [ identifier[self] . identifier[backend] . identifier[config] . identifier[PARAMETER_CLUSTER] ])
keyword[elif] keyword[not] identifier[existing] :
keyword[raise] identifier[NotImplementedError] ()
identifier[result] = identifier[self] . identifier[backend] . identifier[storage] . identifier[store] ( identifier[instance] )
keyword[return] identifier[ProvisionedServiceSpec] ( identifier[ProvisionState] . identifier[SUCCESSFUL_CREATED] ,
literal[string] ,
identifier[str] ( identifier[result] ))
keyword[elif] identifier[instance] . identifier[parameters] == identifier[parameters] :
keyword[return] identifier[ProvisionedServiceSpec] ( identifier[ProvisionState] . identifier[IDENTICAL_ALREADY_EXISTS] ,
literal[string] ,
literal[string] )
keyword[else] :
keyword[raise] identifier[ErrInstanceAlreadyExists] () | def create(self, instance, parameters, existing):
""" Create the instance
Args:
instance (AtlasServiceInstance.Instance): Existing or New instance
parameters (dict): Parameters for the instance
existing (bool): Create an instance on an existing Atlas cluster
Returns:
ProvisionedServiceSpec: Status
Raises:
ErrInstanceAlreadyExists: If instance exists but with different parameters
ErrClusterNotFound: Cluster does not exist
"""
if not instance.isProvisioned():
# Set parameters
instance.parameters = parameters
# Existing cluster
if existing and (not self.backend.atlas.Clusters.is_existing_cluster(instance.parameters[self.backend.config.PARAMETER_CLUSTER])):
# We need to use an existing cluster that is not available !
raise ErrClusterNotFound(instance.parameters[self.backend.config.PARAMETER_CLUSTER]) # depends on [control=['if'], data=[]]
elif not existing:
# We need to create a new cluster
# We should not reach this code because the AtlasBroker.provision should
# raise an ErrPlanUnsupported before.
raise NotImplementedError() # depends on [control=['if'], data=[]]
result = self.backend.storage.store(instance)
# Provision done
return ProvisionedServiceSpec(ProvisionState.SUCCESSFUL_CREATED, '', str(result)) # depends on [control=['if'], data=[]]
elif instance.parameters == parameters:
# Identical so nothing to do
return ProvisionedServiceSpec(ProvisionState.IDENTICAL_ALREADY_EXISTS, '', 'duplicate') # depends on [control=['if'], data=[]]
else:
# Different parameters ...
raise ErrInstanceAlreadyExists() |
def _from_dict(cls, _dict):
    """Initialize a QueryRelationsRelationship object from a json dictionary."""
    args = {}
    # Copy only the keys that are present; absent keys fall back to the
    # constructor's own defaults.
    if 'type' in _dict:
        args['type'] = _dict.get('type')
    if 'frequency' in _dict:
        args['frequency'] = _dict.get('frequency')
    if 'arguments' in _dict:
        # Recursively deserialize each nested argument object.
        args['arguments'] = [
            QueryRelationsArgument._from_dict(x)
            for x in (_dict.get('arguments'))
        ]
    if 'evidence' in _dict:
        # Recursively deserialize each nested evidence object.
        args['evidence'] = [
            QueryEvidence._from_dict(x) for x in (_dict.get('evidence'))
        ]
    return cls(**args) | def function[_from_dict, parameter[cls, _dict]]:
constant[Initialize a QueryRelationsRelationship object from a json dictionary.]
variable[args] assign[=] dictionary[[], []]
if compare[constant[type] in name[_dict]] begin[:]
call[name[args]][constant[type]] assign[=] call[name[_dict].get, parameter[constant[type]]]
if compare[constant[frequency] in name[_dict]] begin[:]
call[name[args]][constant[frequency]] assign[=] call[name[_dict].get, parameter[constant[frequency]]]
if compare[constant[arguments] in name[_dict]] begin[:]
call[name[args]][constant[arguments]] assign[=] <ast.ListComp object at 0x7da1b1b45240>
if compare[constant[evidence] in name[_dict]] begin[:]
call[name[args]][constant[evidence]] assign[=] <ast.ListComp object at 0x7da1b1b47dc0>
return[call[name[cls], parameter[]]] | keyword[def] identifier[_from_dict] ( identifier[cls] , identifier[_dict] ):
literal[string]
identifier[args] ={}
keyword[if] literal[string] keyword[in] identifier[_dict] :
identifier[args] [ literal[string] ]= identifier[_dict] . identifier[get] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[_dict] :
identifier[args] [ literal[string] ]= identifier[_dict] . identifier[get] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[_dict] :
identifier[args] [ literal[string] ]=[
identifier[QueryRelationsArgument] . identifier[_from_dict] ( identifier[x] )
keyword[for] identifier[x] keyword[in] ( identifier[_dict] . identifier[get] ( literal[string] ))
]
keyword[if] literal[string] keyword[in] identifier[_dict] :
identifier[args] [ literal[string] ]=[
identifier[QueryEvidence] . identifier[_from_dict] ( identifier[x] ) keyword[for] identifier[x] keyword[in] ( identifier[_dict] . identifier[get] ( literal[string] ))
]
keyword[return] identifier[cls] (** identifier[args] ) | def _from_dict(cls, _dict):
"""Initialize a QueryRelationsRelationship object from a json dictionary."""
args = {}
if 'type' in _dict:
args['type'] = _dict.get('type') # depends on [control=['if'], data=['_dict']]
if 'frequency' in _dict:
args['frequency'] = _dict.get('frequency') # depends on [control=['if'], data=['_dict']]
if 'arguments' in _dict:
args['arguments'] = [QueryRelationsArgument._from_dict(x) for x in _dict.get('arguments')] # depends on [control=['if'], data=['_dict']]
if 'evidence' in _dict:
args['evidence'] = [QueryEvidence._from_dict(x) for x in _dict.get('evidence')] # depends on [control=['if'], data=['_dict']]
return cls(**args) |
def save(self, path=None, complevel=1, complib='zlib'):
    """
    Save the container as an HDF5 archive.

    Args:
        path (str): Path where to save the container. If None, the container's
            hexuid becomes the file name; if an existing directory, the file
            is created inside it; otherwise it must end in ".hdf5" or ".hdf".
        complevel (int): Compression level forwarded to pandas' HDFStore.
        complib (str): Compression library forwarded to pandas' HDFStore.

    Raises:
        ValueError: If an explicit file path lacks a ".hdf5"/".hdf" extension.
    """
    if path is None:
        path = self.hexuid + '.hdf5'
    elif os.path.isdir(path):
        path += os.sep + self.hexuid + '.hdf5'
    elif not (path.endswith('.hdf5') or path.endswith('.hdf')):
        raise ValueError('File path must have a ".hdf5" or ".hdf" extension.')
    with pd.HDFStore(path, 'w', complevel=complevel, complib=complib) as store:
        # Persist the container's relational metadata on a placeholder entry.
        store['kwargs'] = pd.Series()
        store.get_storer('kwargs').attrs.metadata = self._rel()
        fc = 0 # Field counter (see special handling of fields below)
        for name, data in self._data().items():
            if hasattr(data, '_revert_categories'):
                # Expand categorical columns before writing; restored below.
                data._revert_categories()
            # Drop a leading underscore so private attributes get clean keys.
            name = name[1:] if name.startswith('_') else name
            if isinstance(data, Field): # Fields are handled separately
                # A Field is written as its own frame plus one entry per
                # element of field_values, under a "FIELD<n>_<name>/" prefix.
                fname = 'FIELD{}_'.format(fc) + name + '/'
                store[fname + 'data'] = pd.DataFrame(data)
                for i, field in enumerate(data.field_values):
                    ffname = fname + 'values' + str(i)
                    if isinstance(field, pd.Series):
                        store[ffname] = pd.Series(field)
                    else:
                        store[ffname] = pd.DataFrame(field)
                fc += 1
            elif isinstance(data, Series):
                s = pd.Series(data)
                # Cast categoricals to plain objects before storing
                # (presumably for HDF5 serialization compatibility -- confirm).
                if isinstance(data.dtype, pd.types.dtypes.CategoricalDtype):
                    s = s.astype('O')
                store[name] = s
            elif isinstance(data, DataFrame):
                store[name] = pd.DataFrame(data)
            elif isinstance(data, SparseSeries):
                s = pd.SparseSeries(data)
                if isinstance(data.dtype, pd.types.dtypes.CategoricalDtype):
                    s = s.astype('O')
                store[name] = s
            elif isinstance(data, SparseDataFrame):
                store[name] = pd.SparseDataFrame(data)
            else:
                # Fallback for other pandas-like objects: de-categorize the
                # object itself, or each of its columns, before storing.
                if hasattr(data, 'dtype') and isinstance(data.dtype, pd.types.dtypes.CategoricalDtype):
                    data = data.astype('O')
                else:
                    for col in data:
                        if isinstance(data[col].dtype, pd.types.dtypes.CategoricalDtype):
                            data[col] = data[col].astype('O')
                store[name] = data
            if hasattr(data, '_set_categories'):
                # Restore the category compression undone at the loop top.
                data._set_categories() | def function[save, parameter[self, path, complevel, complib]]:
constant[
Save the container as an HDF5 archive.
Args:
path (str): Path where to save the container
]
if compare[name[path] is constant[None]] begin[:]
variable[path] assign[=] binary_operation[name[self].hexuid + constant[.hdf5]]
with call[name[pd].HDFStore, parameter[name[path], constant[w]]] begin[:]
call[name[store]][constant[kwargs]] assign[=] call[name[pd].Series, parameter[]]
call[name[store].get_storer, parameter[constant[kwargs]]].attrs.metadata assign[=] call[name[self]._rel, parameter[]]
variable[fc] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da2047eb8e0>, <ast.Name object at 0x7da2047ea170>]]] in starred[call[call[name[self]._data, parameter[]].items, parameter[]]] begin[:]
if call[name[hasattr], parameter[name[data], constant[_revert_categories]]] begin[:]
call[name[data]._revert_categories, parameter[]]
variable[name] assign[=] <ast.IfExp object at 0x7da2047e8100>
if call[name[isinstance], parameter[name[data], name[Field]]] begin[:]
variable[fname] assign[=] binary_operation[binary_operation[call[constant[FIELD{}_].format, parameter[name[fc]]] + name[name]] + constant[/]]
call[name[store]][binary_operation[name[fname] + constant[data]]] assign[=] call[name[pd].DataFrame, parameter[name[data]]]
for taget[tuple[[<ast.Name object at 0x7da2047e8fd0>, <ast.Name object at 0x7da2047e9210>]]] in starred[call[name[enumerate], parameter[name[data].field_values]]] begin[:]
variable[ffname] assign[=] binary_operation[binary_operation[name[fname] + constant[values]] + call[name[str], parameter[name[i]]]]
if call[name[isinstance], parameter[name[field], name[pd].Series]] begin[:]
call[name[store]][name[ffname]] assign[=] call[name[pd].Series, parameter[name[field]]]
<ast.AugAssign object at 0x7da2047e8580>
if call[name[hasattr], parameter[name[data], constant[_set_categories]]] begin[:]
call[name[data]._set_categories, parameter[]] | keyword[def] identifier[save] ( identifier[self] , identifier[path] = keyword[None] , identifier[complevel] = literal[int] , identifier[complib] = literal[string] ):
literal[string]
keyword[if] identifier[path] keyword[is] keyword[None] :
identifier[path] = identifier[self] . identifier[hexuid] + literal[string]
keyword[elif] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ):
identifier[path] += identifier[os] . identifier[sep] + identifier[self] . identifier[hexuid] + literal[string]
keyword[elif] keyword[not] ( identifier[path] . identifier[endswith] ( literal[string] ) keyword[or] identifier[path] . identifier[endswith] ( literal[string] )):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[with] identifier[pd] . identifier[HDFStore] ( identifier[path] , literal[string] , identifier[complevel] = identifier[complevel] , identifier[complib] = identifier[complib] ) keyword[as] identifier[store] :
identifier[store] [ literal[string] ]= identifier[pd] . identifier[Series] ()
identifier[store] . identifier[get_storer] ( literal[string] ). identifier[attrs] . identifier[metadata] = identifier[self] . identifier[_rel] ()
identifier[fc] = literal[int]
keyword[for] identifier[name] , identifier[data] keyword[in] identifier[self] . identifier[_data] (). identifier[items] ():
keyword[if] identifier[hasattr] ( identifier[data] , literal[string] ):
identifier[data] . identifier[_revert_categories] ()
identifier[name] = identifier[name] [ literal[int] :] keyword[if] identifier[name] . identifier[startswith] ( literal[string] ) keyword[else] identifier[name]
keyword[if] identifier[isinstance] ( identifier[data] , identifier[Field] ):
identifier[fname] = literal[string] . identifier[format] ( identifier[fc] )+ identifier[name] + literal[string]
identifier[store] [ identifier[fname] + literal[string] ]= identifier[pd] . identifier[DataFrame] ( identifier[data] )
keyword[for] identifier[i] , identifier[field] keyword[in] identifier[enumerate] ( identifier[data] . identifier[field_values] ):
identifier[ffname] = identifier[fname] + literal[string] + identifier[str] ( identifier[i] )
keyword[if] identifier[isinstance] ( identifier[field] , identifier[pd] . identifier[Series] ):
identifier[store] [ identifier[ffname] ]= identifier[pd] . identifier[Series] ( identifier[field] )
keyword[else] :
identifier[store] [ identifier[ffname] ]= identifier[pd] . identifier[DataFrame] ( identifier[field] )
identifier[fc] += literal[int]
keyword[elif] identifier[isinstance] ( identifier[data] , identifier[Series] ):
identifier[s] = identifier[pd] . identifier[Series] ( identifier[data] )
keyword[if] identifier[isinstance] ( identifier[data] . identifier[dtype] , identifier[pd] . identifier[types] . identifier[dtypes] . identifier[CategoricalDtype] ):
identifier[s] = identifier[s] . identifier[astype] ( literal[string] )
identifier[store] [ identifier[name] ]= identifier[s]
keyword[elif] identifier[isinstance] ( identifier[data] , identifier[DataFrame] ):
identifier[store] [ identifier[name] ]= identifier[pd] . identifier[DataFrame] ( identifier[data] )
keyword[elif] identifier[isinstance] ( identifier[data] , identifier[SparseSeries] ):
identifier[s] = identifier[pd] . identifier[SparseSeries] ( identifier[data] )
keyword[if] identifier[isinstance] ( identifier[data] . identifier[dtype] , identifier[pd] . identifier[types] . identifier[dtypes] . identifier[CategoricalDtype] ):
identifier[s] = identifier[s] . identifier[astype] ( literal[string] )
identifier[store] [ identifier[name] ]= identifier[s]
keyword[elif] identifier[isinstance] ( identifier[data] , identifier[SparseDataFrame] ):
identifier[store] [ identifier[name] ]= identifier[pd] . identifier[SparseDataFrame] ( identifier[data] )
keyword[else] :
keyword[if] identifier[hasattr] ( identifier[data] , literal[string] ) keyword[and] identifier[isinstance] ( identifier[data] . identifier[dtype] , identifier[pd] . identifier[types] . identifier[dtypes] . identifier[CategoricalDtype] ):
identifier[data] = identifier[data] . identifier[astype] ( literal[string] )
keyword[else] :
keyword[for] identifier[col] keyword[in] identifier[data] :
keyword[if] identifier[isinstance] ( identifier[data] [ identifier[col] ]. identifier[dtype] , identifier[pd] . identifier[types] . identifier[dtypes] . identifier[CategoricalDtype] ):
identifier[data] [ identifier[col] ]= identifier[data] [ identifier[col] ]. identifier[astype] ( literal[string] )
identifier[store] [ identifier[name] ]= identifier[data]
keyword[if] identifier[hasattr] ( identifier[data] , literal[string] ):
identifier[data] . identifier[_set_categories] () | def save(self, path=None, complevel=1, complib='zlib'):
"""
Save the container as an HDF5 archive.
Args:
path (str): Path where to save the container
"""
if path is None:
path = self.hexuid + '.hdf5' # depends on [control=['if'], data=['path']]
elif os.path.isdir(path):
path += os.sep + self.hexuid + '.hdf5' # depends on [control=['if'], data=[]]
elif not (path.endswith('.hdf5') or path.endswith('.hdf')):
raise ValueError('File path must have a ".hdf5" or ".hdf" extension.') # depends on [control=['if'], data=[]]
with pd.HDFStore(path, 'w', complevel=complevel, complib=complib) as store:
store['kwargs'] = pd.Series()
store.get_storer('kwargs').attrs.metadata = self._rel()
fc = 0 # Field counter (see special handling of fields below)
for (name, data) in self._data().items():
if hasattr(data, '_revert_categories'):
data._revert_categories() # depends on [control=['if'], data=[]]
name = name[1:] if name.startswith('_') else name
if isinstance(data, Field): # Fields are handled separately
fname = 'FIELD{}_'.format(fc) + name + '/'
store[fname + 'data'] = pd.DataFrame(data)
for (i, field) in enumerate(data.field_values):
ffname = fname + 'values' + str(i)
if isinstance(field, pd.Series):
store[ffname] = pd.Series(field) # depends on [control=['if'], data=[]]
else:
store[ffname] = pd.DataFrame(field) # depends on [control=['for'], data=[]]
fc += 1 # depends on [control=['if'], data=[]]
elif isinstance(data, Series):
s = pd.Series(data)
if isinstance(data.dtype, pd.types.dtypes.CategoricalDtype):
s = s.astype('O') # depends on [control=['if'], data=[]]
store[name] = s # depends on [control=['if'], data=[]]
elif isinstance(data, DataFrame):
store[name] = pd.DataFrame(data) # depends on [control=['if'], data=[]]
elif isinstance(data, SparseSeries):
s = pd.SparseSeries(data)
if isinstance(data.dtype, pd.types.dtypes.CategoricalDtype):
s = s.astype('O') # depends on [control=['if'], data=[]]
store[name] = s # depends on [control=['if'], data=[]]
elif isinstance(data, SparseDataFrame):
store[name] = pd.SparseDataFrame(data) # depends on [control=['if'], data=[]]
else:
if hasattr(data, 'dtype') and isinstance(data.dtype, pd.types.dtypes.CategoricalDtype):
data = data.astype('O') # depends on [control=['if'], data=[]]
else:
for col in data:
if isinstance(data[col].dtype, pd.types.dtypes.CategoricalDtype):
data[col] = data[col].astype('O') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['col']]
store[name] = data
if hasattr(data, '_set_categories'):
data._set_categories() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['store']] |
def create_image_table(self, r=None):
    """ Create and store a new ImageTable instance based on the current
    Dataset. Will generally be called privately, but may be useful as a
    convenience method in cases where the user wants to re-generate the
    table with a new smoothing kernel of different radius.

    Args:
        r (int): An optional integer indicating the radius of the smoothing
            kernel. By default, this is None, which will keep whatever
            value is currently set in the Dataset instance.
    """
    logger.info("Creating image table...")
    # Only override the stored radius when one is explicitly provided.
    if r is not None:
        self.r = r
    # ImageTable picks up the (possibly updated) radius from this Dataset.
    self.image_table = ImageTable(self) | def function[create_image_table, parameter[self, r]]:
constant[ Create and store a new ImageTable instance based on the current
Dataset. Will generally be called privately, but may be useful as a
convenience method in cases where the user wants to re-generate the
table with a new smoothing kernel of different radius.
Args:
r (int): An optional integer indicating the radius of the smoothing
kernel. By default, this is None, which will keep whatever
value is currently set in the Dataset instance.
]
call[name[logger].info, parameter[constant[Creating image table...]]]
if compare[name[r] is_not constant[None]] begin[:]
name[self].r assign[=] name[r]
name[self].image_table assign[=] call[name[ImageTable], parameter[name[self]]] | keyword[def] identifier[create_image_table] ( identifier[self] , identifier[r] = keyword[None] ):
literal[string]
identifier[logger] . identifier[info] ( literal[string] )
keyword[if] identifier[r] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[r] = identifier[r]
identifier[self] . identifier[image_table] = identifier[ImageTable] ( identifier[self] ) | def create_image_table(self, r=None):
""" Create and store a new ImageTable instance based on the current
Dataset. Will generally be called privately, but may be useful as a
convenience method in cases where the user wants to re-generate the
table with a new smoothing kernel of different radius.
Args:
r (int): An optional integer indicating the radius of the smoothing
kernel. By default, this is None, which will keep whatever
value is currently set in the Dataset instance.
"""
logger.info('Creating image table...')
if r is not None:
self.r = r # depends on [control=['if'], data=['r']]
self.image_table = ImageTable(self) |
def set_meta_profiling(how):
    """ Enables or disables the profiling of metadata at the loading of a GMQLDataset

    :param how: True if you want to analyze the metadata when a GMQLDataset is created
                by a load_from_*. False otherwise. (Default=True)
    :return: None
    :raises TypeError: if *how* is not a bool
    """
    # The flag lives at module level so every subsequent load sees it.
    global __metadata_profiling
    if isinstance(how, bool):
        __metadata_profiling = how
    else:
        # Reject truthy/falsy non-booleans explicitly rather than coercing.
        raise TypeError("how must be boolean. {} was provided".format(type(how))) | def function[set_meta_profiling, parameter[how]]:
constant[ Enables or disables the profiling of metadata at the loading of a GMQLDataset
:param how: True if you want to analyze the metadata when a GMQLDataset is created
by a load_from_*. False otherwise. (Default=True)
:return: None
]
<ast.Global object at 0x7da1b1a77d30>
if call[name[isinstance], parameter[name[how], name[bool]]] begin[:]
variable[__metadata_profiling] assign[=] name[how] | keyword[def] identifier[set_meta_profiling] ( identifier[how] ):
literal[string]
keyword[global] identifier[__metadata_profiling]
keyword[if] identifier[isinstance] ( identifier[how] , identifier[bool] ):
identifier[__metadata_profiling] = identifier[how]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[how] ))) | def set_meta_profiling(how):
""" Enables or disables the profiling of metadata at the loading of a GMQLDataset
:param how: True if you want to analyze the metadata when a GMQLDataset is created
by a load_from_*. False otherwise. (Default=True)
:return: None
"""
global __metadata_profiling
if isinstance(how, bool):
__metadata_profiling = how # depends on [control=['if'], data=[]]
else:
raise TypeError('how must be boolean. {} was provided'.format(type(how))) |
def get_platform_gpio(**keywords):
    """Attempt to return a GPIO instance for the platform which the code is being
    executed on. Currently supports only the Raspberry Pi using the RPi.GPIO
    library and Beaglebone Black using the Adafruit_BBIO library. Will throw an
    exception if a GPIO instance can't be created for the current platform. The
    returned GPIO object is an instance of BaseGPIO.

    Keyword arguments are forwarded unchanged to the platform adapter.
    """
    plat = Platform.platform_detect()
    # Platform libraries are imported lazily so only the one for the
    # detected board needs to be installed.
    if plat == Platform.RASPBERRY_PI:
        import RPi.GPIO
        return RPiGPIOAdapter(RPi.GPIO, **keywords)
    elif plat == Platform.BEAGLEBONE_BLACK:
        import Adafruit_BBIO.GPIO
        return AdafruitBBIOAdapter(Adafruit_BBIO.GPIO, **keywords)
    elif plat == Platform.MINNOWBOARD:
        import mraa
        return AdafruitMinnowAdapter(mraa, **keywords)
    elif plat == Platform.JETSON_NANO:
        # Jetson reuses the RPi adapter, wrapping the Jetson.GPIO module.
        import Jetson.GPIO
        return RPiGPIOAdapter(Jetson.GPIO, **keywords)
    elif plat == Platform.UNKNOWN:
        # NOTE(review): any Platform value NOT handled above falls through and
        # implicitly returns None -- confirm every enum value is covered here.
        raise RuntimeError('Could not determine platform.') | def function[get_platform_gpio, parameter[]]:
constant[Attempt to return a GPIO instance for the platform which the code is being
executed on. Currently supports only the Raspberry Pi using the RPi.GPIO
library and Beaglebone Black using the Adafruit_BBIO library. Will throw an
exception if a GPIO instance can't be created for the current platform. The
returned GPIO object is an instance of BaseGPIO.
]
variable[plat] assign[=] call[name[Platform].platform_detect, parameter[]]
if compare[name[plat] equal[==] name[Platform].RASPBERRY_PI] begin[:]
import module[RPi.GPIO]
return[call[name[RPiGPIOAdapter], parameter[name[RPi].GPIO]]] | keyword[def] identifier[get_platform_gpio] (** identifier[keywords] ):
literal[string]
identifier[plat] = identifier[Platform] . identifier[platform_detect] ()
keyword[if] identifier[plat] == identifier[Platform] . identifier[RASPBERRY_PI] :
keyword[import] identifier[RPi] . identifier[GPIO]
keyword[return] identifier[RPiGPIOAdapter] ( identifier[RPi] . identifier[GPIO] ,** identifier[keywords] )
keyword[elif] identifier[plat] == identifier[Platform] . identifier[BEAGLEBONE_BLACK] :
keyword[import] identifier[Adafruit_BBIO] . identifier[GPIO]
keyword[return] identifier[AdafruitBBIOAdapter] ( identifier[Adafruit_BBIO] . identifier[GPIO] ,** identifier[keywords] )
keyword[elif] identifier[plat] == identifier[Platform] . identifier[MINNOWBOARD] :
keyword[import] identifier[mraa]
keyword[return] identifier[AdafruitMinnowAdapter] ( identifier[mraa] ,** identifier[keywords] )
keyword[elif] identifier[plat] == identifier[Platform] . identifier[JETSON_NANO] :
keyword[import] identifier[Jetson] . identifier[GPIO]
keyword[return] identifier[RPiGPIOAdapter] ( identifier[Jetson] . identifier[GPIO] ,** identifier[keywords] )
keyword[elif] identifier[plat] == identifier[Platform] . identifier[UNKNOWN] :
keyword[raise] identifier[RuntimeError] ( literal[string] ) | def get_platform_gpio(**keywords):
"""Attempt to return a GPIO instance for the platform which the code is being
executed on. Currently supports only the Raspberry Pi using the RPi.GPIO
library and Beaglebone Black using the Adafruit_BBIO library. Will throw an
exception if a GPIO instance can't be created for the current platform. The
returned GPIO object is an instance of BaseGPIO.
"""
plat = Platform.platform_detect()
if plat == Platform.RASPBERRY_PI:
import RPi.GPIO
return RPiGPIOAdapter(RPi.GPIO, **keywords) # depends on [control=['if'], data=[]]
elif plat == Platform.BEAGLEBONE_BLACK:
import Adafruit_BBIO.GPIO
return AdafruitBBIOAdapter(Adafruit_BBIO.GPIO, **keywords) # depends on [control=['if'], data=[]]
elif plat == Platform.MINNOWBOARD:
import mraa
return AdafruitMinnowAdapter(mraa, **keywords) # depends on [control=['if'], data=[]]
elif plat == Platform.JETSON_NANO:
import Jetson.GPIO
return RPiGPIOAdapter(Jetson.GPIO, **keywords) # depends on [control=['if'], data=[]]
elif plat == Platform.UNKNOWN:
raise RuntimeError('Could not determine platform.') # depends on [control=['if'], data=[]] |
def object_list(self):
    """
    Return this table's object_list, transformed (sorted, reversed,
    filtered, etc) according to its meta options.
    """
    def _sort(ob, ol):
        # A leading "-" on the order_by key means descending order.
        reverse = ob.startswith("-")
        ob = ob[1:] if reverse else ob
        # Prefer a column-supplied Python-side sort key when one matches.
        for column in self.columns:
            if column.sort_key_fn is not None and column.name == ob:
                return sorted(ol, key=column.sort_key_fn, reverse=reverse)
        # Otherwise delegate to the object list's own order_by (e.g. an
        # ORM queryset); "|" separates multiple ordering keys.
        if self._meta.order_by and hasattr(ol, "order_by"):
            return list(ol.order_by(*self._meta.order_by.split("|")))
        return ol
    ol = self._object_list
    ob = self._meta.order_by
    if not ob: return ol
    # basestring: this code targets Python 2.
    if isinstance(ob, basestring):
        return _sort(ob, ol)
    elif isinstance(ob, list):
        # NOTE(review): reverse() mutates self._meta.order_by in place, so
        # repeated calls see the list in alternating order -- confirm intended.
        ob.reverse()
        # Apply sort keys from lowest to highest priority.
        for fn in ob:
            ol = _sort(fn, ol)
    return ol | def function[object_list, parameter[self]]:
constant[
Return this table's object_list, transformed (sorted, reversed,
filtered, etc) according to its meta options.
]
def function[_sort, parameter[ob, ol]]:
variable[reverse] assign[=] call[name[ob].startswith, parameter[constant[-]]]
variable[ob] assign[=] <ast.IfExp object at 0x7da1b0acbd90>
for taget[name[column]] in starred[name[self].columns] begin[:]
if <ast.BoolOp object at 0x7da1b0ac8670> begin[:]
return[call[name[sorted], parameter[name[ol]]]]
if <ast.BoolOp object at 0x7da1b0ac8880> begin[:]
return[call[name[list], parameter[call[name[ol].order_by, parameter[<ast.Starred object at 0x7da1b0ac8e80>]]]]]
return[name[ol]]
variable[ol] assign[=] name[self]._object_list
variable[ob] assign[=] name[self]._meta.order_by
if <ast.UnaryOp object at 0x7da1b0a70130> begin[:]
return[name[ol]]
if call[name[isinstance], parameter[name[ob], name[basestring]]] begin[:]
return[call[name[_sort], parameter[name[ob], name[ol]]]]
return[name[ol]] | keyword[def] identifier[object_list] ( identifier[self] ):
literal[string]
keyword[def] identifier[_sort] ( identifier[ob] , identifier[ol] ):
identifier[reverse] = identifier[ob] . identifier[startswith] ( literal[string] )
identifier[ob] = identifier[ob] [ literal[int] :] keyword[if] identifier[reverse] keyword[else] identifier[ob]
keyword[for] identifier[column] keyword[in] identifier[self] . identifier[columns] :
keyword[if] identifier[column] . identifier[sort_key_fn] keyword[is] keyword[not] keyword[None] keyword[and] identifier[column] . identifier[name] == identifier[ob] :
keyword[return] identifier[sorted] ( identifier[ol] , identifier[key] = identifier[column] . identifier[sort_key_fn] , identifier[reverse] = identifier[reverse] )
keyword[if] identifier[self] . identifier[_meta] . identifier[order_by] keyword[and] identifier[hasattr] ( identifier[ol] , literal[string] ):
keyword[return] identifier[list] ( identifier[ol] . identifier[order_by] (* identifier[self] . identifier[_meta] . identifier[order_by] . identifier[split] ( literal[string] )))
keyword[return] identifier[ol]
identifier[ol] = identifier[self] . identifier[_object_list]
identifier[ob] = identifier[self] . identifier[_meta] . identifier[order_by]
keyword[if] keyword[not] identifier[ob] : keyword[return] identifier[ol]
keyword[if] identifier[isinstance] ( identifier[ob] , identifier[basestring] ):
keyword[return] identifier[_sort] ( identifier[ob] , identifier[ol] )
keyword[elif] identifier[isinstance] ( identifier[ob] , identifier[list] ):
identifier[ob] . identifier[reverse] ()
keyword[for] identifier[fn] keyword[in] identifier[ob] :
identifier[ol] = identifier[_sort] ( identifier[fn] , identifier[ol] )
keyword[return] identifier[ol] | def object_list(self):
"""
Return this table's object_list, transformed (sorted, reversed,
filtered, etc) according to its meta options.
"""
def _sort(ob, ol):
reverse = ob.startswith('-')
ob = ob[1:] if reverse else ob
for column in self.columns:
if column.sort_key_fn is not None and column.name == ob:
return sorted(ol, key=column.sort_key_fn, reverse=reverse) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['column']]
if self._meta.order_by and hasattr(ol, 'order_by'):
return list(ol.order_by(*self._meta.order_by.split('|'))) # depends on [control=['if'], data=[]]
return ol
ol = self._object_list
ob = self._meta.order_by
if not ob:
return ol # depends on [control=['if'], data=[]]
if isinstance(ob, basestring):
return _sort(ob, ol) # depends on [control=['if'], data=[]]
elif isinstance(ob, list):
ob.reverse()
for fn in ob:
ol = _sort(fn, ol) # depends on [control=['for'], data=['fn']] # depends on [control=['if'], data=[]]
return ol |
def download_task(url, headers, destination, download_type='layer'):
    '''Download a single file (typically an image layer .tar.gz) to a
    destination path. The file is first written under a temporary name and
    then moved into place, so the destination only ever appears complete.

    Parameters
    ==========
    url: the url to download from
    headers: HTTP headers (e.g., authorization) passed through to download()
    destination: final path for the downloaded file
    download_type: label used only for the log message (default "layer")

    Returns
    =======
    destination: the path of the completed download (exits on failure)
    '''
    # Update the user what we are doing
    bot.verbose("Downloading %s from %s" % (download_type, url))

    # Step 1: Download to a temporary sibling name so a partial download
    # never occupies the final destination path.
    file_name = "%s.%s" % (destination,
                           next(tempfile._get_candidate_names()))
    tar_download = download(url, file_name, headers=headers)

    try:
        shutil.move(tar_download, destination)
    except Exception:
        # The move into place failed (the original message blamed "untar",
        # but no untar happens here) -- most likely a bad/partial download.
        msg = "Cannot move %s to %s," % (tar_download, destination)
        msg += " was there a problem with download?"
        bot.error(msg)
        sys.exit(1)
    return destination
constant[download an image layer (.tar.gz) to a specified download folder.
This task is done by using local versions of the same download functions
that are used for the client.
core stream/download functions of the parent client.
Parameters
==========
image_id: the shasum id of the layer, already determined to not exist
repo_name: the image name (library/ubuntu) to retrieve
download_folder: download to this folder. If not set, uses temp.
]
call[name[bot].verbose, parameter[binary_operation[constant[Downloading %s from %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b02421d0>, <ast.Name object at 0x7da1b0241330>]]]]]
variable[file_name] assign[=] binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0383280>, <ast.Call object at 0x7da1b03837c0>]]]
variable[tar_download] assign[=] call[name[download], parameter[name[url], name[file_name]]]
<ast.Try object at 0x7da1b0241900>
return[name[destination]] | keyword[def] identifier[download_task] ( identifier[url] , identifier[headers] , identifier[destination] , identifier[download_type] = literal[string] ):
literal[string]
identifier[bot] . identifier[verbose] ( literal[string] %( identifier[download_type] , identifier[url] ))
identifier[file_name] = literal[string] %( identifier[destination] ,
identifier[next] ( identifier[tempfile] . identifier[_get_candidate_names] ()))
identifier[tar_download] = identifier[download] ( identifier[url] , identifier[file_name] , identifier[headers] = identifier[headers] )
keyword[try] :
identifier[shutil] . identifier[move] ( identifier[tar_download] , identifier[destination] )
keyword[except] identifier[Exception] :
identifier[msg] = literal[string] % identifier[tar_download]
identifier[msg] += literal[string]
identifier[bot] . identifier[error] ( identifier[msg] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[return] identifier[destination] | def download_task(url, headers, destination, download_type='layer'):
"""download an image layer (.tar.gz) to a specified download folder.
This task is done by using local versions of the same download functions
that are used for the client.
core stream/download functions of the parent client.
Parameters
==========
image_id: the shasum id of the layer, already determined to not exist
repo_name: the image name (library/ubuntu) to retrieve
download_folder: download to this folder. If not set, uses temp.
"""
# Update the user what we are doing
bot.verbose('Downloading %s from %s' % (download_type, url))
# Step 1: Download the layer atomically
file_name = '%s.%s' % (destination, next(tempfile._get_candidate_names()))
tar_download = download(url, file_name, headers=headers)
try:
shutil.move(tar_download, destination) # depends on [control=['try'], data=[]]
except Exception:
msg = 'Cannot untar layer %s,' % tar_download
msg += ' was there a problem with download?'
bot.error(msg)
sys.exit(1) # depends on [control=['except'], data=[]]
return destination |
def remove(self, tag, nth=1):
    """Remove the n-th occurrence of tag in this message.

    :param tag: FIX field tag number to be removed.
    :param nth: Index of tag if repeating, first is 1.
    :returns: Value of the field if removed, None otherwise."""
    wanted = fix_tag(tag)
    remaining = int(nth)
    for index, (field_tag, field_value) in enumerate(self.pairs):
        if field_tag != wanted:
            continue
        remaining -= 1
        if remaining == 0:
            # Deleting is safe here: we return immediately, so the
            # in-progress iteration is never resumed.
            del self.pairs[index]
            return field_value
    return None
constant[Remove the n-th occurrence of tag in this message.
:param tag: FIX field tag number to be removed.
:param nth: Index of tag if repeating, first is 1.
:returns: Value of the field if removed, None otherwise.]
variable[tag] assign[=] call[name[fix_tag], parameter[name[tag]]]
variable[nth] assign[=] call[name[int], parameter[name[nth]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].pairs]]]]] begin[:]
<ast.Tuple object at 0x7da1b1d5d2a0> assign[=] call[name[self].pairs][name[i]]
if compare[name[t] equal[==] name[tag]] begin[:]
<ast.AugAssign object at 0x7da1b1d067d0>
if compare[name[nth] equal[==] constant[0]] begin[:]
call[name[self].pairs.pop, parameter[name[i]]]
return[name[v]]
return[constant[None]] | keyword[def] identifier[remove] ( identifier[self] , identifier[tag] , identifier[nth] = literal[int] ):
literal[string]
identifier[tag] = identifier[fix_tag] ( identifier[tag] )
identifier[nth] = identifier[int] ( identifier[nth] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[pairs] )):
identifier[t] , identifier[v] = identifier[self] . identifier[pairs] [ identifier[i] ]
keyword[if] identifier[t] == identifier[tag] :
identifier[nth] -= literal[int]
keyword[if] identifier[nth] == literal[int] :
identifier[self] . identifier[pairs] . identifier[pop] ( identifier[i] )
keyword[return] identifier[v]
keyword[return] keyword[None] | def remove(self, tag, nth=1):
"""Remove the n-th occurrence of tag in this message.
:param tag: FIX field tag number to be removed.
:param nth: Index of tag if repeating, first is 1.
:returns: Value of the field if removed, None otherwise."""
tag = fix_tag(tag)
nth = int(nth)
for i in range(len(self.pairs)):
(t, v) = self.pairs[i]
if t == tag:
nth -= 1
if nth == 0:
self.pairs.pop(i)
return v # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return None |
def config_find(self, config_dirs=None, config_filename=None):
    """ Attempt to use the config dir/config_filenames to
        locate the configuration file requested. Some folks
        would prefer to keep their config in ~ where it's in
        plain sight rather than the buried application
        specific location

        :param config_dirs: a directory or list of directories to search;
                            defaults to self._config_dirs
        :param config_filename: the file name to look for; defaults to
                                self._config_filename
        :returns: full path of the first existing config file, or None
    """
    # BUG FIX: `basestring` only exists on Python 2 and raised NameError
    # on Python 3 whenever a single string was passed. Resolve the proper
    # string type at call time so both interpreters behave identically.
    try:
        string_types = basestring
    except NameError:
        string_types = str

    if config_dirs is None:
        config_dirs = self._config_dirs
    elif isinstance(config_dirs, string_types):
        # A bare string is treated as a one-element search path.
        config_dirs = [config_dirs]

    if config_filename is None:
        config_filename = self._config_filename

    for test_dir in config_dirs:
        test_fpath = os.path.join(test_dir, config_filename)
        if os.path.isfile(test_fpath):
            return test_fpath

    # No matches found
    return None
constant[ Attempt to use the config dir/config_filenames to
locate the configuration file requested. Some folks
would prefer to keep their config in ~ where it's in
plain sight rather than the buried application
specific location
]
if compare[name[config_dirs] is constant[None]] begin[:]
variable[config_dirs] assign[=] name[self]._config_dirs
if compare[name[config_filename] is constant[None]] begin[:]
variable[config_filename] assign[=] name[self]._config_filename
for taget[name[test_dir]] in starred[name[config_dirs]] begin[:]
variable[test_fpath] assign[=] call[name[os].path.join, parameter[name[test_dir], name[config_filename]]]
if call[name[os].path.isfile, parameter[name[test_fpath]]] begin[:]
return[name[test_fpath]]
return[None] | keyword[def] identifier[config_find] ( identifier[self] , identifier[config_dirs] = keyword[None] , identifier[config_filename] = keyword[None] ):
literal[string]
keyword[if] identifier[config_dirs] keyword[is] keyword[None] :
identifier[config_dirs] = identifier[self] . identifier[_config_dirs]
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[config_dirs] , identifier[basestring] ):
identifier[config_dirs] =[ identifier[config_dirs] ]
keyword[if] identifier[config_filename] keyword[is] keyword[None] :
identifier[config_filename] = identifier[self] . identifier[_config_filename]
keyword[for] identifier[test_dir] keyword[in] identifier[config_dirs] :
identifier[test_fpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[test_dir] , identifier[config_filename] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[test_fpath] ):
keyword[return] identifier[test_fpath]
keyword[return] | def config_find(self, config_dirs=None, config_filename=None):
""" Attempt to use the config dir/config_filenames to
locate the configuration file requested. Some folks
would prefer to keep their config in ~ where it's in
plain sight rather than the buried application
specific location
"""
if config_dirs is None:
config_dirs = self._config_dirs # depends on [control=['if'], data=['config_dirs']]
elif isinstance(config_dirs, basestring):
config_dirs = [config_dirs] # depends on [control=['if'], data=[]]
if config_filename is None:
config_filename = self._config_filename # depends on [control=['if'], data=['config_filename']]
for test_dir in config_dirs:
test_fpath = os.path.join(test_dir, config_filename)
if os.path.isfile(test_fpath):
return test_fpath # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['test_dir']]
# No matches found
return |
def phantom_decorate(f, get_or_add):
    """
    Decorator for version-dependent fields.
    If get_or_add is True (means get), we return s, self.phantom_value.
    If it is False (means add), we return s.
    """
    from functools import wraps

    @wraps(f)  # preserve f's __name__/__doc__ on the returned wrapper
    def wrapper(*args):
        self, pkt, s = args[:3]
        # In phantom mode the field is absent from the wire: a getter
        # reports the configured phantom value, an adder emits nothing.
        if phantom_mode(pkt):
            if get_or_add:
                return s, self.phantom_value
            return s
        return f(*args)
    return wrapper
constant[
Decorator for version-dependent fields.
If get_or_add is True (means get), we return s, self.phantom_value.
If it is False (means add), we return s.
]
def function[wrapper, parameter[]]:
<ast.Tuple object at 0x7da1b215cb20> assign[=] call[name[args]][<ast.Slice object at 0x7da1b215e200>]
if call[name[phantom_mode], parameter[name[pkt]]] begin[:]
if name[get_or_add] begin[:]
return[tuple[[<ast.Name object at 0x7da1b215c2b0>, <ast.Attribute object at 0x7da1b215ccd0>]]]
return[name[s]]
return[call[name[f], parameter[<ast.Starred object at 0x7da1b215ecb0>]]]
return[name[wrapper]] | keyword[def] identifier[phantom_decorate] ( identifier[f] , identifier[get_or_add] ):
literal[string]
keyword[def] identifier[wrapper] (* identifier[args] ):
identifier[self] , identifier[pkt] , identifier[s] = identifier[args] [: literal[int] ]
keyword[if] identifier[phantom_mode] ( identifier[pkt] ):
keyword[if] identifier[get_or_add] :
keyword[return] identifier[s] , identifier[self] . identifier[phantom_value]
keyword[return] identifier[s]
keyword[return] identifier[f] (* identifier[args] )
keyword[return] identifier[wrapper] | def phantom_decorate(f, get_or_add):
"""
Decorator for version-dependent fields.
If get_or_add is True (means get), we return s, self.phantom_value.
If it is False (means add), we return s.
"""
def wrapper(*args):
(self, pkt, s) = args[:3]
if phantom_mode(pkt):
if get_or_add:
return (s, self.phantom_value) # depends on [control=['if'], data=[]]
return s # depends on [control=['if'], data=[]]
return f(*args)
return wrapper |
def connect(self, retry=0, delay=0):
    """Initiate connection to CM. Blocks until connected unless ``retry`` is specified.

    Returns ``None`` immediately (without attempting anything) when a
    connection is already established or already in progress.

    :param retry: number of retries before returning. Unlimited when set to ``None``
    :type retry: :class:`int`
    :param delay: delay in seconds before connection attempt
    :type delay: :class:`int`
    :return: successful connection
    :rtype: :class:`bool`
    """
    # Guard clauses: connecting is a no-op if we are already connected
    # or another connect is in flight.
    if self.connected:
        self._LOG.debug("Connect called, but we are connected?")
        return
    if self._connecting:
        self._LOG.debug("Connect called, but we are already connecting.")
        return
    self._connecting = True
    if delay:
        # Announce the upcoming (re)connect to listeners, then wait.
        self._LOG.debug("Delayed connect: %d seconds" % delay)
        self.emit(self.EVENT_RECONNECT, delay)
        self.sleep(delay)
    self._LOG.debug("Connect initiated.")
    for i, server_addr in enumerate(self.cm_servers):
        # NOTE(review): with retry=N this permits N+1 attempts (indices
        # 0..N) before giving up -- confirm that off-by-one is intended.
        # Also, _connecting is left True on this False return.
        if retry and i > retry:
            return False
        start = time()
        if self.connection.connect(server_addr):
            break
        diff = time() - start
        self._LOG.debug("Failed to connect. Retrying...")
        # Throttle: keep attempts spaced at least ~5 seconds apart.
        if diff < 5:
            self.sleep(5 - diff)
    # NOTE(review): if the server iterator were ever exhausted without a
    # successful connect, we would fall through here and mark ourselves
    # connected to the last attempted address -- presumably cm_servers
    # keeps yielding candidates; verify.
    self.current_server_addr = server_addr
    self.connected = True
    self.emit(self.EVENT_CONNECTED)
    # Start the background greenlet that pumps incoming messages.
    self._recv_loop = gevent.spawn(self._recv_messages)
    self._connecting = False
    return True
constant[Initiate connection to CM. Blocks until connected unless ``retry`` is specified.
:param retry: number of retries before returning. Unlimited when set to ``None``
:type retry: :class:`int`
:param delay: delay in secnds before connection attempt
:type delay: :class:`int`
:return: successful connection
:rtype: :class:`bool`
]
if name[self].connected begin[:]
call[name[self]._LOG.debug, parameter[constant[Connect called, but we are connected?]]]
return[None]
if name[self]._connecting begin[:]
call[name[self]._LOG.debug, parameter[constant[Connect called, but we are already connecting.]]]
return[None]
name[self]._connecting assign[=] constant[True]
if name[delay] begin[:]
call[name[self]._LOG.debug, parameter[binary_operation[constant[Delayed connect: %d seconds] <ast.Mod object at 0x7da2590d6920> name[delay]]]]
call[name[self].emit, parameter[name[self].EVENT_RECONNECT, name[delay]]]
call[name[self].sleep, parameter[name[delay]]]
call[name[self]._LOG.debug, parameter[constant[Connect initiated.]]]
for taget[tuple[[<ast.Name object at 0x7da1b1d482b0>, <ast.Name object at 0x7da1b1d4b1c0>]]] in starred[call[name[enumerate], parameter[name[self].cm_servers]]] begin[:]
if <ast.BoolOp object at 0x7da1b1d4abc0> begin[:]
return[constant[False]]
variable[start] assign[=] call[name[time], parameter[]]
if call[name[self].connection.connect, parameter[name[server_addr]]] begin[:]
break
variable[diff] assign[=] binary_operation[call[name[time], parameter[]] - name[start]]
call[name[self]._LOG.debug, parameter[constant[Failed to connect. Retrying...]]]
if compare[name[diff] less[<] constant[5]] begin[:]
call[name[self].sleep, parameter[binary_operation[constant[5] - name[diff]]]]
name[self].current_server_addr assign[=] name[server_addr]
name[self].connected assign[=] constant[True]
call[name[self].emit, parameter[name[self].EVENT_CONNECTED]]
name[self]._recv_loop assign[=] call[name[gevent].spawn, parameter[name[self]._recv_messages]]
name[self]._connecting assign[=] constant[False]
return[constant[True]] | keyword[def] identifier[connect] ( identifier[self] , identifier[retry] = literal[int] , identifier[delay] = literal[int] ):
literal[string]
keyword[if] identifier[self] . identifier[connected] :
identifier[self] . identifier[_LOG] . identifier[debug] ( literal[string] )
keyword[return]
keyword[if] identifier[self] . identifier[_connecting] :
identifier[self] . identifier[_LOG] . identifier[debug] ( literal[string] )
keyword[return]
identifier[self] . identifier[_connecting] = keyword[True]
keyword[if] identifier[delay] :
identifier[self] . identifier[_LOG] . identifier[debug] ( literal[string] % identifier[delay] )
identifier[self] . identifier[emit] ( identifier[self] . identifier[EVENT_RECONNECT] , identifier[delay] )
identifier[self] . identifier[sleep] ( identifier[delay] )
identifier[self] . identifier[_LOG] . identifier[debug] ( literal[string] )
keyword[for] identifier[i] , identifier[server_addr] keyword[in] identifier[enumerate] ( identifier[self] . identifier[cm_servers] ):
keyword[if] identifier[retry] keyword[and] identifier[i] > identifier[retry] :
keyword[return] keyword[False]
identifier[start] = identifier[time] ()
keyword[if] identifier[self] . identifier[connection] . identifier[connect] ( identifier[server_addr] ):
keyword[break]
identifier[diff] = identifier[time] ()- identifier[start]
identifier[self] . identifier[_LOG] . identifier[debug] ( literal[string] )
keyword[if] identifier[diff] < literal[int] :
identifier[self] . identifier[sleep] ( literal[int] - identifier[diff] )
identifier[self] . identifier[current_server_addr] = identifier[server_addr]
identifier[self] . identifier[connected] = keyword[True]
identifier[self] . identifier[emit] ( identifier[self] . identifier[EVENT_CONNECTED] )
identifier[self] . identifier[_recv_loop] = identifier[gevent] . identifier[spawn] ( identifier[self] . identifier[_recv_messages] )
identifier[self] . identifier[_connecting] = keyword[False]
keyword[return] keyword[True] | def connect(self, retry=0, delay=0):
"""Initiate connection to CM. Blocks until connected unless ``retry`` is specified.
:param retry: number of retries before returning. Unlimited when set to ``None``
:type retry: :class:`int`
:param delay: delay in secnds before connection attempt
:type delay: :class:`int`
:return: successful connection
:rtype: :class:`bool`
"""
if self.connected:
self._LOG.debug('Connect called, but we are connected?')
return # depends on [control=['if'], data=[]]
if self._connecting:
self._LOG.debug('Connect called, but we are already connecting.')
return # depends on [control=['if'], data=[]]
self._connecting = True
if delay:
self._LOG.debug('Delayed connect: %d seconds' % delay)
self.emit(self.EVENT_RECONNECT, delay)
self.sleep(delay) # depends on [control=['if'], data=[]]
self._LOG.debug('Connect initiated.')
for (i, server_addr) in enumerate(self.cm_servers):
if retry and i > retry:
return False # depends on [control=['if'], data=[]]
start = time()
if self.connection.connect(server_addr):
break # depends on [control=['if'], data=[]]
diff = time() - start
self._LOG.debug('Failed to connect. Retrying...')
if diff < 5:
self.sleep(5 - diff) # depends on [control=['if'], data=['diff']] # depends on [control=['for'], data=[]]
self.current_server_addr = server_addr
self.connected = True
self.emit(self.EVENT_CONNECTED)
self._recv_loop = gevent.spawn(self._recv_messages)
self._connecting = False
return True |
def __dumptable(self, table):
    """ Dumps table on screen
    for debugging purposes

    Writes one line per symbol: "<name>\t<--- <entry> <entry type>",
    appending the stored value for ID entries.
    """
    for x in table.table.keys():
        sys.stdout.write("{0}\t<--- {1} {2}".format(x, table[x], type(table[x])))
        if isinstance(table[x], ID):
            # BUG FIX: sys.stdout is a file object, not callable -- the
            # original `sys.stdout(...)` raised TypeError (and the stray
            # trailing comma built a throwaway tuple). Use write().
            sys.stdout.write(" {0}".format(table[x].value))
        sys.stdout.write("\n")
constant[ Dumps table on screen
for debugging purposes
]
for taget[name[x]] in starred[call[name[table].table.keys, parameter[]]] begin[:]
call[name[sys].stdout.write, parameter[call[constant[{0} <--- {1} {2}].format, parameter[name[x], call[name[table]][name[x]], call[name[type], parameter[call[name[table]][name[x]]]]]]]]
if call[name[isinstance], parameter[call[name[table]][name[x]], name[ID]]] begin[:]
tuple[[<ast.Call object at 0x7da20c76caf0>]]
call[name[sys].stdout.write, parameter[constant[
]]] | keyword[def] identifier[__dumptable] ( identifier[self] , identifier[table] ):
literal[string]
keyword[for] identifier[x] keyword[in] identifier[table] . identifier[table] . identifier[keys] ():
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] . identifier[format] ( identifier[x] , identifier[table] [ identifier[x] ], identifier[type] ( identifier[table] [ identifier[x] ])))
keyword[if] identifier[isinstance] ( identifier[table] [ identifier[x] ], identifier[ID] ):
identifier[sys] . identifier[stdout] ( literal[string] . identifier[format] ( identifier[table] [ identifier[x] ]. identifier[value] )),
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] ) | def __dumptable(self, table):
""" Dumps table on screen
for debugging purposes
"""
for x in table.table.keys():
sys.stdout.write('{0}\t<--- {1} {2}'.format(x, table[x], type(table[x])))
if isinstance(table[x], ID):
(sys.stdout(' {0}'.format(table[x].value)),) # depends on [control=['if'], data=[]]
sys.stdout.write('\n') # depends on [control=['for'], data=['x']] |
def _run_program(self, bin, fastafile, params=None):
    """
    Run HMS and predict motifs from a FASTA file.

    Parameters
    ----------
    bin : str
        Command used to run the tool.
    fastafile : str
        Name of the FASTA input file.
    params : dict, optional
        Optional parameters. For some of the tools required parameters
        are passed using this dictionary.

    Returns
    -------
    motifs : list of Motif instances
        The predicted motifs.
    stdout : str
        Standard out of the tool.
    stderr : str
        Standard error of the tool.
    """
    params = self._parse_params(params)
    # Merge user parameters over the defaults and use the merged dict
    # from here on. BUG FIX: previously the raw `params` dict was read
    # for "width", so the default of 10 was silently ignored and a
    # missing key raised KeyError.
    default_params = {"width": 10}
    if params is not None:
        default_params.update(params)
    params = default_params

    fgfile, summitfile, outfile = self._prepare_files(fastafile)

    current_path = os.getcwd()
    os.chdir(self.tmpdir)
    try:
        cmd = "{} -i {} -w {} -dna 4 -iteration 50 -chain 20 -seqprop -0.1 -strand 2 -peaklocation {} -t_dof 3 -dep 2".format(
            bin,
            fgfile,
            params['width'],
            summitfile)
        p = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
        stdout, stderr = p.communicate()
    finally:
        # BUG FIX: always restore the working directory, even if
        # launching or waiting on the tool raises.
        os.chdir(current_path)

    motifs = []
    if os.path.exists(outfile):
        with open(outfile) as f:
            motifs = self.parse(f)
    for i, m in enumerate(motifs):
        m.id = "HMS_w{}_{}".format(params['width'], i + 1)
    return motifs, stdout, stderr
constant[
Run HMS and predict motifs from a FASTA file.
Parameters
----------
bin : str
Command used to run the tool.
fastafile : str
Name of the FASTA input file.
params : dict, optional
Optional parameters. For some of the tools required parameters
are passed using this dictionary.
Returns
-------
motifs : list of Motif instances
The predicted motifs.
stdout : str
Standard out of the tool.
stderr : str
Standard error of the tool.
]
variable[params] assign[=] call[name[self]._parse_params, parameter[name[params]]]
variable[default_params] assign[=] dictionary[[<ast.Constant object at 0x7da2054a4d30>], [<ast.Constant object at 0x7da2054a4460>]]
if compare[name[params] is_not constant[None]] begin[:]
call[name[default_params].update, parameter[name[params]]]
<ast.Tuple object at 0x7da2054a7a90> assign[=] call[name[self]._prepare_files, parameter[name[fastafile]]]
variable[current_path] assign[=] call[name[os].getcwd, parameter[]]
call[name[os].chdir, parameter[name[self].tmpdir]]
variable[cmd] assign[=] call[constant[{} -i {} -w {} -dna 4 -iteration 50 -chain 20 -seqprop -0.1 -strand 2 -peaklocation {} -t_dof 3 -dep 2].format, parameter[name[bin], name[fgfile], call[name[params]][constant[width]], name[summitfile]]]
variable[p] assign[=] call[name[Popen], parameter[name[cmd]]]
<ast.Tuple object at 0x7da2054a5d80> assign[=] call[name[p].communicate, parameter[]]
call[name[os].chdir, parameter[name[current_path]]]
variable[motifs] assign[=] list[[]]
if call[name[os].path.exists, parameter[name[outfile]]] begin[:]
with call[name[open], parameter[name[outfile]]] begin[:]
variable[motifs] assign[=] call[name[self].parse, parameter[name[f]]]
for taget[tuple[[<ast.Name object at 0x7da1b10b0430>, <ast.Name object at 0x7da1b10b1f00>]]] in starred[call[name[enumerate], parameter[name[motifs]]]] begin[:]
name[m].id assign[=] call[constant[HMS_w{}_{}].format, parameter[call[name[params]][constant[width]], binary_operation[name[i] + constant[1]]]]
return[tuple[[<ast.Name object at 0x7da1b10a71c0>, <ast.Name object at 0x7da1b10a7850>, <ast.Name object at 0x7da1b10a48e0>]]] | keyword[def] identifier[_run_program] ( identifier[self] , identifier[bin] , identifier[fastafile] , identifier[params] = keyword[None] ):
literal[string]
identifier[params] = identifier[self] . identifier[_parse_params] ( identifier[params] )
identifier[default_params] ={ literal[string] : literal[int] }
keyword[if] identifier[params] keyword[is] keyword[not] keyword[None] :
identifier[default_params] . identifier[update] ( identifier[params] )
identifier[fgfile] , identifier[summitfile] , identifier[outfile] = identifier[self] . identifier[_prepare_files] ( identifier[fastafile] )
identifier[current_path] = identifier[os] . identifier[getcwd] ()
identifier[os] . identifier[chdir] ( identifier[self] . identifier[tmpdir] )
identifier[cmd] = literal[string] . identifier[format] (
identifier[bin] ,
identifier[fgfile] ,
identifier[params] [ literal[string] ],
identifier[summitfile] )
identifier[p] = identifier[Popen] ( identifier[cmd] , identifier[shell] = keyword[True] , identifier[stdout] = identifier[PIPE] , identifier[stderr] = identifier[PIPE] )
identifier[stdout] , identifier[stderr] = identifier[p] . identifier[communicate] ()
identifier[os] . identifier[chdir] ( identifier[current_path] )
identifier[motifs] =[]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[outfile] ):
keyword[with] identifier[open] ( identifier[outfile] ) keyword[as] identifier[f] :
identifier[motifs] = identifier[self] . identifier[parse] ( identifier[f] )
keyword[for] identifier[i] , identifier[m] keyword[in] identifier[enumerate] ( identifier[motifs] ):
identifier[m] . identifier[id] = literal[string] . identifier[format] ( identifier[params] [ literal[string] ], identifier[i] + literal[int] )
keyword[return] identifier[motifs] , identifier[stdout] , identifier[stderr] | def _run_program(self, bin, fastafile, params=None):
"""
Run HMS and predict motifs from a FASTA file.
Parameters
----------
bin : str
Command used to run the tool.
fastafile : str
Name of the FASTA input file.
params : dict, optional
Optional parameters. For some of the tools required parameters
are passed using this dictionary.
Returns
-------
motifs : list of Motif instances
The predicted motifs.
stdout : str
Standard out of the tool.
stderr : str
Standard error of the tool.
"""
params = self._parse_params(params)
default_params = {'width': 10}
if params is not None:
default_params.update(params) # depends on [control=['if'], data=['params']]
(fgfile, summitfile, outfile) = self._prepare_files(fastafile)
current_path = os.getcwd()
os.chdir(self.tmpdir)
cmd = '{} -i {} -w {} -dna 4 -iteration 50 -chain 20 -seqprop -0.1 -strand 2 -peaklocation {} -t_dof 3 -dep 2'.format(bin, fgfile, params['width'], summitfile)
p = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
(stdout, stderr) = p.communicate()
os.chdir(current_path)
motifs = []
if os.path.exists(outfile):
with open(outfile) as f:
motifs = self.parse(f)
for (i, m) in enumerate(motifs):
m.id = 'HMS_w{}_{}'.format(params['width'], i + 1) # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
return (motifs, stdout, stderr) |
def minute(self, value=None):
    """Corresponds to IDD Field `minute`

    Args:
        value (int): value for IDD Field `minute`
            value >= 0
            value <= 60
            if `value` is None it will not be checked against the
            specification and is assumed to be a missing value

    Raises:
        ValueError: if `value` is not a valid value
    """
    if value is None:
        # A missing value is stored as-is, without validation.
        self._minute = None
        return
    try:
        checked = int(value)
    except ValueError:
        raise ValueError(
            'value {} need to be of type int for field `minute`'.format(value))
    if checked < 0:
        raise ValueError(
            'value need to be greater or equal 0 for field `minute`')
    if checked > 60:
        raise ValueError(
            'value need to be smaller 60 for field `minute`')
    self._minute = checked
constant[Corresponds to IDD Field `minute`
Args:
value (int): value for IDD Field `minute`
value >= 0
value <= 60
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
]
if compare[name[value] is_not constant[None]] begin[:]
<ast.Try object at 0x7da1b0fb2e30>
if compare[name[value] less[<] constant[0]] begin[:]
<ast.Raise object at 0x7da1b0fb0b80>
if compare[name[value] greater[>] constant[60]] begin[:]
<ast.Raise object at 0x7da1b0fb17b0>
name[self]._minute assign[=] name[value] | keyword[def] identifier[minute] ( identifier[self] , identifier[value] = keyword[None] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[value] = identifier[int] ( identifier[value] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[value] ))
keyword[if] identifier[value] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[if] identifier[value] > literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[self] . identifier[_minute] = identifier[value] | def minute(self, value=None):
"""Corresponds to IDD Field `minute`
Args:
value (int): value for IDD Field `minute`
value >= 0
value <= 60
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = int(value) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('value {} need to be of type int for field `minute`'.format(value)) # depends on [control=['except'], data=[]]
if value < 0:
raise ValueError('value need to be greater or equal 0 for field `minute`') # depends on [control=['if'], data=[]]
if value > 60:
raise ValueError('value need to be smaller 60 for field `minute`') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['value']]
self._minute = value |
def sg_reshape(tensor, opt):
    r"""Reshapes a tensor.

    Thin wrapper over `tf.reshape()`; see the tensorflow docs.

    Args:
      tensor: A `Tensor` (automatically given by chain).
      opt:
        shape: A tuple/list of integers. The destination shape.
        name: If provided, replace current tensor's name.

    Returns:
      A `Tensor`.
    """
    target_shape = opt.shape
    assert target_shape is not None, 'shape is mandatory.'
    return tf.reshape(tensor, target_shape, name=opt.name)
constant[Reshapes a tensor.
See `tf.reshape()` in tensorflow.
Args:
tensor: A `Tensor` (automatically given by chain).
opt:
shape: A tuple/list of integers. The destination shape.
name: If provided, replace current tensor's name.
Returns:
A `Tensor`.
]
assert[compare[name[opt].shape is_not constant[None]]]
return[call[name[tf].reshape, parameter[name[tensor], name[opt].shape]]] | keyword[def] identifier[sg_reshape] ( identifier[tensor] , identifier[opt] ):
literal[string]
keyword[assert] identifier[opt] . identifier[shape] keyword[is] keyword[not] keyword[None] , literal[string]
keyword[return] identifier[tf] . identifier[reshape] ( identifier[tensor] , identifier[opt] . identifier[shape] , identifier[name] = identifier[opt] . identifier[name] ) | def sg_reshape(tensor, opt):
"""Reshapes a tensor.
See `tf.reshape()` in tensorflow.
Args:
tensor: A `Tensor` (automatically given by chain).
opt:
shape: A tuple/list of integers. The destination shape.
name: If provided, replace current tensor's name.
Returns:
A `Tensor`.
"""
assert opt.shape is not None, 'shape is mandatory.'
return tf.reshape(tensor, opt.shape, name=opt.name) |
def clean_files(files):
"""Generates tuples with a ``file``-like object and a close indicator.
This is a generator of tuples, where the first element is the file object
and the second element is a boolean which is True if this module opened the
file (and thus should close it).
Raises
------
OSError : Accessing the given file path failed
Parameters
----------
files : list | io.IOBase | str
Collection or single instance of a filepath and file-like object
"""
if isinstance(files, (list, tuple)):
for f in files:
yield clean_file(f)
else:
yield clean_file(files) | def function[clean_files, parameter[files]]:
constant[Generates tuples with a ``file``-like object and a close indicator.
This is a generator of tuples, where the first element is the file object
and the second element is a boolean which is True if this module opened the
file (and thus should close it).
Raises
------
OSError : Accessing the given file path failed
Parameters
----------
files : list | io.IOBase | str
Collection or single instance of a filepath and file-like object
]
if call[name[isinstance], parameter[name[files], tuple[[<ast.Name object at 0x7da20c6c6200>, <ast.Name object at 0x7da20c6c6cb0>]]]] begin[:]
for taget[name[f]] in starred[name[files]] begin[:]
<ast.Yield object at 0x7da20c6c67d0> | keyword[def] identifier[clean_files] ( identifier[files] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[files] ,( identifier[list] , identifier[tuple] )):
keyword[for] identifier[f] keyword[in] identifier[files] :
keyword[yield] identifier[clean_file] ( identifier[f] )
keyword[else] :
keyword[yield] identifier[clean_file] ( identifier[files] ) | def clean_files(files):
"""Generates tuples with a ``file``-like object and a close indicator.
This is a generator of tuples, where the first element is the file object
and the second element is a boolean which is True if this module opened the
file (and thus should close it).
Raises
------
OSError : Accessing the given file path failed
Parameters
----------
files : list | io.IOBase | str
Collection or single instance of a filepath and file-like object
"""
if isinstance(files, (list, tuple)):
for f in files:
yield clean_file(f) # depends on [control=['for'], data=['f']] # depends on [control=['if'], data=[]]
else:
yield clean_file(files) |
def to_dict(self):
"""Transform to dictionary
Returns:
dict: dictionary with same content
"""
return {key: self.__getitem__(key).value for key in self.options()} | def function[to_dict, parameter[self]]:
constant[Transform to dictionary
Returns:
dict: dictionary with same content
]
return[<ast.DictComp object at 0x7da2054a7f70>] | keyword[def] identifier[to_dict] ( identifier[self] ):
literal[string]
keyword[return] { identifier[key] : identifier[self] . identifier[__getitem__] ( identifier[key] ). identifier[value] keyword[for] identifier[key] keyword[in] identifier[self] . identifier[options] ()} | def to_dict(self):
"""Transform to dictionary
Returns:
dict: dictionary with same content
"""
return {key: self.__getitem__(key).value for key in self.options()} |
def url_is_alive(url):
"""
Checks that a given URL is reachable.
:param url: A URL
:rtype: bool
"""
request = urllib.request.Request(url)
request.get_method = lambda: 'HEAD'
try:
urllib.request.urlopen(request)
return True
except urllib.request.HTTPError:
return False | def function[url_is_alive, parameter[url]]:
constant[
Checks that a given URL is reachable.
:param url: A URL
:rtype: bool
]
variable[request] assign[=] call[name[urllib].request.Request, parameter[name[url]]]
name[request].get_method assign[=] <ast.Lambda object at 0x7da1b0d62e90>
<ast.Try object at 0x7da1b0d606d0> | keyword[def] identifier[url_is_alive] ( identifier[url] ):
literal[string]
identifier[request] = identifier[urllib] . identifier[request] . identifier[Request] ( identifier[url] )
identifier[request] . identifier[get_method] = keyword[lambda] : literal[string]
keyword[try] :
identifier[urllib] . identifier[request] . identifier[urlopen] ( identifier[request] )
keyword[return] keyword[True]
keyword[except] identifier[urllib] . identifier[request] . identifier[HTTPError] :
keyword[return] keyword[False] | def url_is_alive(url):
"""
Checks that a given URL is reachable.
:param url: A URL
:rtype: bool
"""
request = urllib.request.Request(url)
request.get_method = lambda : 'HEAD'
try:
urllib.request.urlopen(request)
return True # depends on [control=['try'], data=[]]
except urllib.request.HTTPError:
return False # depends on [control=['except'], data=[]] |
def on_use_runtime_value_toggled(self, widget, path):
"""Try to set the use runtime value flag to the newly entered one
"""
try:
data_port_id = self.list_store[path][self.ID_STORAGE_ID]
self.toggle_runtime_value_usage(data_port_id)
except TypeError as e:
logger.exception("Error while trying to change the use_runtime_value flag") | def function[on_use_runtime_value_toggled, parameter[self, widget, path]]:
constant[Try to set the use runtime value flag to the newly entered one
]
<ast.Try object at 0x7da1b26afe50> | keyword[def] identifier[on_use_runtime_value_toggled] ( identifier[self] , identifier[widget] , identifier[path] ):
literal[string]
keyword[try] :
identifier[data_port_id] = identifier[self] . identifier[list_store] [ identifier[path] ][ identifier[self] . identifier[ID_STORAGE_ID] ]
identifier[self] . identifier[toggle_runtime_value_usage] ( identifier[data_port_id] )
keyword[except] identifier[TypeError] keyword[as] identifier[e] :
identifier[logger] . identifier[exception] ( literal[string] ) | def on_use_runtime_value_toggled(self, widget, path):
"""Try to set the use runtime value flag to the newly entered one
"""
try:
data_port_id = self.list_store[path][self.ID_STORAGE_ID]
self.toggle_runtime_value_usage(data_port_id) # depends on [control=['try'], data=[]]
except TypeError as e:
logger.exception('Error while trying to change the use_runtime_value flag') # depends on [control=['except'], data=[]] |
def write(self, stream):
'''
Write PLY data to a writeable file-like object or filename.
'''
(must_close, stream) = _open_stream(stream, 'write')
try:
stream.write(self.header.encode('ascii'))
stream.write(b'\n')
for elt in self:
elt._write(stream, self.text, self.byte_order)
finally:
if must_close:
stream.close() | def function[write, parameter[self, stream]]:
constant[
Write PLY data to a writeable file-like object or filename.
]
<ast.Tuple object at 0x7da1b0b44430> assign[=] call[name[_open_stream], parameter[name[stream], constant[write]]]
<ast.Try object at 0x7da1b0b44100> | keyword[def] identifier[write] ( identifier[self] , identifier[stream] ):
literal[string]
( identifier[must_close] , identifier[stream] )= identifier[_open_stream] ( identifier[stream] , literal[string] )
keyword[try] :
identifier[stream] . identifier[write] ( identifier[self] . identifier[header] . identifier[encode] ( literal[string] ))
identifier[stream] . identifier[write] ( literal[string] )
keyword[for] identifier[elt] keyword[in] identifier[self] :
identifier[elt] . identifier[_write] ( identifier[stream] , identifier[self] . identifier[text] , identifier[self] . identifier[byte_order] )
keyword[finally] :
keyword[if] identifier[must_close] :
identifier[stream] . identifier[close] () | def write(self, stream):
"""
Write PLY data to a writeable file-like object or filename.
"""
(must_close, stream) = _open_stream(stream, 'write')
try:
stream.write(self.header.encode('ascii'))
stream.write(b'\n')
for elt in self:
elt._write(stream, self.text, self.byte_order) # depends on [control=['for'], data=['elt']] # depends on [control=['try'], data=[]]
finally:
if must_close:
stream.close() # depends on [control=['if'], data=[]] |
def detect_circle(nodes):
""" Wrapper for recursive _detect_circle function """
# Verify nodes and traveled types
if not isinstance(nodes, dict):
raise TypeError('"nodes" must be a dictionary')
dependencies = set(nodes.keys())
traveled = []
heads = _detect_circle(nodes, dependencies, traveled)
return DependencyTree(heads) | def function[detect_circle, parameter[nodes]]:
constant[ Wrapper for recursive _detect_circle function ]
if <ast.UnaryOp object at 0x7da2054a4280> begin[:]
<ast.Raise object at 0x7da1b1455c00>
variable[dependencies] assign[=] call[name[set], parameter[call[name[nodes].keys, parameter[]]]]
variable[traveled] assign[=] list[[]]
variable[heads] assign[=] call[name[_detect_circle], parameter[name[nodes], name[dependencies], name[traveled]]]
return[call[name[DependencyTree], parameter[name[heads]]]] | keyword[def] identifier[detect_circle] ( identifier[nodes] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[nodes] , identifier[dict] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[dependencies] = identifier[set] ( identifier[nodes] . identifier[keys] ())
identifier[traveled] =[]
identifier[heads] = identifier[_detect_circle] ( identifier[nodes] , identifier[dependencies] , identifier[traveled] )
keyword[return] identifier[DependencyTree] ( identifier[heads] ) | def detect_circle(nodes):
""" Wrapper for recursive _detect_circle function """
# Verify nodes and traveled types
if not isinstance(nodes, dict):
raise TypeError('"nodes" must be a dictionary') # depends on [control=['if'], data=[]]
dependencies = set(nodes.keys())
traveled = []
heads = _detect_circle(nodes, dependencies, traveled)
return DependencyTree(heads) |
def __datetime_to_epoch(self, date_time):
"""
Converts a python datetime to unix epoch, accounting for
time zones and such.
Assumes UTC if timezone is not given.
"""
date_time_utc = None
if date_time.tzinfo is None:
date_time_utc = date_time.replace(tzinfo=pytz.utc)
else:
date_time_utc = date_time.astimezone(pytz.utc)
epoch_utc = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=pytz.utc)
return (date_time_utc - epoch_utc).total_seconds() | def function[__datetime_to_epoch, parameter[self, date_time]]:
constant[
Converts a python datetime to unix epoch, accounting for
time zones and such.
Assumes UTC if timezone is not given.
]
variable[date_time_utc] assign[=] constant[None]
if compare[name[date_time].tzinfo is constant[None]] begin[:]
variable[date_time_utc] assign[=] call[name[date_time].replace, parameter[]]
variable[epoch_utc] assign[=] call[call[name[datetime].datetime.utcfromtimestamp, parameter[constant[0]]].replace, parameter[]]
return[call[binary_operation[name[date_time_utc] - name[epoch_utc]].total_seconds, parameter[]]] | keyword[def] identifier[__datetime_to_epoch] ( identifier[self] , identifier[date_time] ):
literal[string]
identifier[date_time_utc] = keyword[None]
keyword[if] identifier[date_time] . identifier[tzinfo] keyword[is] keyword[None] :
identifier[date_time_utc] = identifier[date_time] . identifier[replace] ( identifier[tzinfo] = identifier[pytz] . identifier[utc] )
keyword[else] :
identifier[date_time_utc] = identifier[date_time] . identifier[astimezone] ( identifier[pytz] . identifier[utc] )
identifier[epoch_utc] = identifier[datetime] . identifier[datetime] . identifier[utcfromtimestamp] ( literal[int] ). identifier[replace] ( identifier[tzinfo] = identifier[pytz] . identifier[utc] )
keyword[return] ( identifier[date_time_utc] - identifier[epoch_utc] ). identifier[total_seconds] () | def __datetime_to_epoch(self, date_time):
"""
Converts a python datetime to unix epoch, accounting for
time zones and such.
Assumes UTC if timezone is not given.
"""
date_time_utc = None
if date_time.tzinfo is None:
date_time_utc = date_time.replace(tzinfo=pytz.utc) # depends on [control=['if'], data=[]]
else:
date_time_utc = date_time.astimezone(pytz.utc)
epoch_utc = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=pytz.utc)
return (date_time_utc - epoch_utc).total_seconds() |
def day(self, value=None):
"""Corresponds to IDD Field `day`
Args:
value (int): value for IDD Field `day`
value >= 1
value <= 31
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = int(value)
except ValueError:
raise ValueError('value {} need to be of type int '
'for field `day`'.format(value))
if value < 1:
raise ValueError('value need to be greater or equal 1 '
'for field `day`')
if value > 31:
raise ValueError('value need to be smaller 31 '
'for field `day`')
self._day = value | def function[day, parameter[self, value]]:
constant[Corresponds to IDD Field `day`
Args:
value (int): value for IDD Field `day`
value >= 1
value <= 31
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
]
if compare[name[value] is_not constant[None]] begin[:]
<ast.Try object at 0x7da1b0f905b0>
if compare[name[value] less[<] constant[1]] begin[:]
<ast.Raise object at 0x7da1b0f90460>
if compare[name[value] greater[>] constant[31]] begin[:]
<ast.Raise object at 0x7da1b0f2b760>
name[self]._day assign[=] name[value] | keyword[def] identifier[day] ( identifier[self] , identifier[value] = keyword[None] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[value] = identifier[int] ( identifier[value] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[value] ))
keyword[if] identifier[value] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[if] identifier[value] > literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[self] . identifier[_day] = identifier[value] | def day(self, value=None):
"""Corresponds to IDD Field `day`
Args:
value (int): value for IDD Field `day`
value >= 1
value <= 31
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = int(value) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('value {} need to be of type int for field `day`'.format(value)) # depends on [control=['except'], data=[]]
if value < 1:
raise ValueError('value need to be greater or equal 1 for field `day`') # depends on [control=['if'], data=[]]
if value > 31:
raise ValueError('value need to be smaller 31 for field `day`') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['value']]
self._day = value |
def get_topic(self):
""" Returns the topic to consider. """
if not hasattr(self, 'topic'):
self.topic = get_object_or_404(
Topic.objects.select_related('forum').all(), pk=self.kwargs['pk'],
)
return self.topic | def function[get_topic, parameter[self]]:
constant[ Returns the topic to consider. ]
if <ast.UnaryOp object at 0x7da2044c2200> begin[:]
name[self].topic assign[=] call[name[get_object_or_404], parameter[call[call[name[Topic].objects.select_related, parameter[constant[forum]]].all, parameter[]]]]
return[name[self].topic] | keyword[def] identifier[get_topic] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[topic] = identifier[get_object_or_404] (
identifier[Topic] . identifier[objects] . identifier[select_related] ( literal[string] ). identifier[all] (), identifier[pk] = identifier[self] . identifier[kwargs] [ literal[string] ],
)
keyword[return] identifier[self] . identifier[topic] | def get_topic(self):
""" Returns the topic to consider. """
if not hasattr(self, 'topic'):
self.topic = get_object_or_404(Topic.objects.select_related('forum').all(), pk=self.kwargs['pk']) # depends on [control=['if'], data=[]]
return self.topic |
def remove(self, node, dirty=True):
"""Remove the given child node.
Args:
node (gkeepapi.Node): Node to remove.
dirty (bool): Whether this node should be marked dirty.
"""
if node.id in self._children:
self._children[node.id].parent = None
del self._children[node.id]
if dirty:
self.touch() | def function[remove, parameter[self, node, dirty]]:
constant[Remove the given child node.
Args:
node (gkeepapi.Node): Node to remove.
dirty (bool): Whether this node should be marked dirty.
]
if compare[name[node].id in name[self]._children] begin[:]
call[name[self]._children][name[node].id].parent assign[=] constant[None]
<ast.Delete object at 0x7da1b2344b50>
if name[dirty] begin[:]
call[name[self].touch, parameter[]] | keyword[def] identifier[remove] ( identifier[self] , identifier[node] , identifier[dirty] = keyword[True] ):
literal[string]
keyword[if] identifier[node] . identifier[id] keyword[in] identifier[self] . identifier[_children] :
identifier[self] . identifier[_children] [ identifier[node] . identifier[id] ]. identifier[parent] = keyword[None]
keyword[del] identifier[self] . identifier[_children] [ identifier[node] . identifier[id] ]
keyword[if] identifier[dirty] :
identifier[self] . identifier[touch] () | def remove(self, node, dirty=True):
"""Remove the given child node.
Args:
node (gkeepapi.Node): Node to remove.
dirty (bool): Whether this node should be marked dirty.
"""
if node.id in self._children:
self._children[node.id].parent = None
del self._children[node.id] # depends on [control=['if'], data=[]]
if dirty:
self.touch() # depends on [control=['if'], data=[]] |
def get_mchirp(h5group):
"""Calculate the chipr mass column for this PyCBC HDF5 table group
"""
mass1 = h5group['mass1'][:]
mass2 = h5group['mass2'][:]
return (mass1 * mass2) ** (3/5.) / (mass1 + mass2) ** (1/5.) | def function[get_mchirp, parameter[h5group]]:
constant[Calculate the chipr mass column for this PyCBC HDF5 table group
]
variable[mass1] assign[=] call[call[name[h5group]][constant[mass1]]][<ast.Slice object at 0x7da204565e40>]
variable[mass2] assign[=] call[call[name[h5group]][constant[mass2]]][<ast.Slice object at 0x7da204565180>]
return[binary_operation[binary_operation[binary_operation[name[mass1] * name[mass2]] ** binary_operation[constant[3] / constant[5.0]]] / binary_operation[binary_operation[name[mass1] + name[mass2]] ** binary_operation[constant[1] / constant[5.0]]]]] | keyword[def] identifier[get_mchirp] ( identifier[h5group] ):
literal[string]
identifier[mass1] = identifier[h5group] [ literal[string] ][:]
identifier[mass2] = identifier[h5group] [ literal[string] ][:]
keyword[return] ( identifier[mass1] * identifier[mass2] )**( literal[int] / literal[int] )/( identifier[mass1] + identifier[mass2] )**( literal[int] / literal[int] ) | def get_mchirp(h5group):
"""Calculate the chipr mass column for this PyCBC HDF5 table group
"""
mass1 = h5group['mass1'][:]
mass2 = h5group['mass2'][:]
return (mass1 * mass2) ** (3 / 5.0) / (mass1 + mass2) ** (1 / 5.0) |
def admin_obj_link(obj, display=''):
"""Returns a link to the django admin change list with a filter set to
only the object given.
:param obj:
Object to create the admin change list display link for
:param display:
Text to display in the link. Defaults to string call of the object
:returns:
Text containing HTML for a link
"""
# get the url for the change list for this object
url = reverse('admin:%s_%s_changelist' % (obj._meta.app_label,
obj._meta.model_name))
url += '?id__exact=%s' % obj.id
text = str(obj)
if display:
text = display
return format_html('<a href="{}">{}</a>', url, text) | def function[admin_obj_link, parameter[obj, display]]:
constant[Returns a link to the django admin change list with a filter set to
only the object given.
:param obj:
Object to create the admin change list display link for
:param display:
Text to display in the link. Defaults to string call of the object
:returns:
Text containing HTML for a link
]
variable[url] assign[=] call[name[reverse], parameter[binary_operation[constant[admin:%s_%s_changelist] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da2054a5d80>, <ast.Attribute object at 0x7da2054a56f0>]]]]]
<ast.AugAssign object at 0x7da2054a5750>
variable[text] assign[=] call[name[str], parameter[name[obj]]]
if name[display] begin[:]
variable[text] assign[=] name[display]
return[call[name[format_html], parameter[constant[<a href="{}">{}</a>], name[url], name[text]]]] | keyword[def] identifier[admin_obj_link] ( identifier[obj] , identifier[display] = literal[string] ):
literal[string]
identifier[url] = identifier[reverse] ( literal[string] %( identifier[obj] . identifier[_meta] . identifier[app_label] ,
identifier[obj] . identifier[_meta] . identifier[model_name] ))
identifier[url] += literal[string] % identifier[obj] . identifier[id]
identifier[text] = identifier[str] ( identifier[obj] )
keyword[if] identifier[display] :
identifier[text] = identifier[display]
keyword[return] identifier[format_html] ( literal[string] , identifier[url] , identifier[text] ) | def admin_obj_link(obj, display=''):
"""Returns a link to the django admin change list with a filter set to
only the object given.
:param obj:
Object to create the admin change list display link for
:param display:
Text to display in the link. Defaults to string call of the object
:returns:
Text containing HTML for a link
"""
# get the url for the change list for this object
url = reverse('admin:%s_%s_changelist' % (obj._meta.app_label, obj._meta.model_name))
url += '?id__exact=%s' % obj.id
text = str(obj)
if display:
text = display # depends on [control=['if'], data=[]]
return format_html('<a href="{}">{}</a>', url, text) |
def get_sensor_data(self):
"""Get sensor reading objects
Iterates sensor reading objects pertaining to the currently
managed BMC.
:returns: Iterator of sdr.SensorReading objects
"""
self.init_sdr()
for sensor in self._sdr.get_sensor_numbers():
rsp = self.raw_command(command=0x2d, netfn=4, data=(sensor,))
if 'error' in rsp:
if rsp['code'] == 203: # Sensor does not exist, optional dev
continue
raise exc.IpmiException(rsp['error'], code=rsp['code'])
yield self._sdr.sensors[sensor].decode_sensor_reading(rsp['data'])
self.oem_init()
for reading in self._oem.get_sensor_data():
yield reading | def function[get_sensor_data, parameter[self]]:
constant[Get sensor reading objects
Iterates sensor reading objects pertaining to the currently
managed BMC.
:returns: Iterator of sdr.SensorReading objects
]
call[name[self].init_sdr, parameter[]]
for taget[name[sensor]] in starred[call[name[self]._sdr.get_sensor_numbers, parameter[]]] begin[:]
variable[rsp] assign[=] call[name[self].raw_command, parameter[]]
if compare[constant[error] in name[rsp]] begin[:]
if compare[call[name[rsp]][constant[code]] equal[==] constant[203]] begin[:]
continue
<ast.Raise object at 0x7da20e955660>
<ast.Yield object at 0x7da18dc043a0>
call[name[self].oem_init, parameter[]]
for taget[name[reading]] in starred[call[name[self]._oem.get_sensor_data, parameter[]]] begin[:]
<ast.Yield object at 0x7da18dc04b50> | keyword[def] identifier[get_sensor_data] ( identifier[self] ):
literal[string]
identifier[self] . identifier[init_sdr] ()
keyword[for] identifier[sensor] keyword[in] identifier[self] . identifier[_sdr] . identifier[get_sensor_numbers] ():
identifier[rsp] = identifier[self] . identifier[raw_command] ( identifier[command] = literal[int] , identifier[netfn] = literal[int] , identifier[data] =( identifier[sensor] ,))
keyword[if] literal[string] keyword[in] identifier[rsp] :
keyword[if] identifier[rsp] [ literal[string] ]== literal[int] :
keyword[continue]
keyword[raise] identifier[exc] . identifier[IpmiException] ( identifier[rsp] [ literal[string] ], identifier[code] = identifier[rsp] [ literal[string] ])
keyword[yield] identifier[self] . identifier[_sdr] . identifier[sensors] [ identifier[sensor] ]. identifier[decode_sensor_reading] ( identifier[rsp] [ literal[string] ])
identifier[self] . identifier[oem_init] ()
keyword[for] identifier[reading] keyword[in] identifier[self] . identifier[_oem] . identifier[get_sensor_data] ():
keyword[yield] identifier[reading] | def get_sensor_data(self):
"""Get sensor reading objects
Iterates sensor reading objects pertaining to the currently
managed BMC.
:returns: Iterator of sdr.SensorReading objects
"""
self.init_sdr()
for sensor in self._sdr.get_sensor_numbers():
rsp = self.raw_command(command=45, netfn=4, data=(sensor,))
if 'error' in rsp:
if rsp['code'] == 203: # Sensor does not exist, optional dev
continue # depends on [control=['if'], data=[]]
raise exc.IpmiException(rsp['error'], code=rsp['code']) # depends on [control=['if'], data=['rsp']]
yield self._sdr.sensors[sensor].decode_sensor_reading(rsp['data']) # depends on [control=['for'], data=['sensor']]
self.oem_init()
for reading in self._oem.get_sensor_data():
yield reading # depends on [control=['for'], data=['reading']] |
def encode(self) -> str:
"""
Create a token based on the data held in the class.
:return: A new token
:rtype: str
"""
payload = {}
payload.update(self.registered_claims)
payload.update(self.payload)
return encode(self.secret, payload, self.alg, self.header) | def function[encode, parameter[self]]:
constant[
Create a token based on the data held in the class.
:return: A new token
:rtype: str
]
variable[payload] assign[=] dictionary[[], []]
call[name[payload].update, parameter[name[self].registered_claims]]
call[name[payload].update, parameter[name[self].payload]]
return[call[name[encode], parameter[name[self].secret, name[payload], name[self].alg, name[self].header]]] | keyword[def] identifier[encode] ( identifier[self] )-> identifier[str] :
literal[string]
identifier[payload] ={}
identifier[payload] . identifier[update] ( identifier[self] . identifier[registered_claims] )
identifier[payload] . identifier[update] ( identifier[self] . identifier[payload] )
keyword[return] identifier[encode] ( identifier[self] . identifier[secret] , identifier[payload] , identifier[self] . identifier[alg] , identifier[self] . identifier[header] ) | def encode(self) -> str:
"""
Create a token based on the data held in the class.
:return: A new token
:rtype: str
"""
payload = {}
payload.update(self.registered_claims)
payload.update(self.payload)
return encode(self.secret, payload, self.alg, self.header) |
def paste(self, *args):
""" Usage: paste([PSMRL], text)
If a pattern is specified, the pattern is clicked first. Doesn't support text paths.
``text`` is pasted as is using the OS paste shortcut (Ctrl+V for Windows/Linux, Cmd+V
for OS X). Note that `paste()` does NOT use special formatting like `type()`.
"""
target = None
text = ""
if len(args) == 1 and isinstance(args[0], basestring):
text = args[0]
elif len(args) == 2 and isinstance(args[1], basestring):
self.click(target)
text = args[1]
else:
raise TypeError("paste method expected [PSMRL], text")
pyperclip.copy(text)
# Triggers OS paste for foreground window
PlatformManager.osPaste()
time.sleep(0.2) | def function[paste, parameter[self]]:
constant[ Usage: paste([PSMRL], text)
If a pattern is specified, the pattern is clicked first. Doesn't support text paths.
``text`` is pasted as is using the OS paste shortcut (Ctrl+V for Windows/Linux, Cmd+V
for OS X). Note that `paste()` does NOT use special formatting like `type()`.
]
variable[target] assign[=] constant[None]
variable[text] assign[=] constant[]
if <ast.BoolOp object at 0x7da18c4cc730> begin[:]
variable[text] assign[=] call[name[args]][constant[0]]
call[name[pyperclip].copy, parameter[name[text]]]
call[name[PlatformManager].osPaste, parameter[]]
call[name[time].sleep, parameter[constant[0.2]]] | keyword[def] identifier[paste] ( identifier[self] ,* identifier[args] ):
literal[string]
identifier[target] = keyword[None]
identifier[text] = literal[string]
keyword[if] identifier[len] ( identifier[args] )== literal[int] keyword[and] identifier[isinstance] ( identifier[args] [ literal[int] ], identifier[basestring] ):
identifier[text] = identifier[args] [ literal[int] ]
keyword[elif] identifier[len] ( identifier[args] )== literal[int] keyword[and] identifier[isinstance] ( identifier[args] [ literal[int] ], identifier[basestring] ):
identifier[self] . identifier[click] ( identifier[target] )
identifier[text] = identifier[args] [ literal[int] ]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[pyperclip] . identifier[copy] ( identifier[text] )
identifier[PlatformManager] . identifier[osPaste] ()
identifier[time] . identifier[sleep] ( literal[int] ) | def paste(self, *args):
""" Usage: paste([PSMRL], text)
If a pattern is specified, the pattern is clicked first. Doesn't support text paths.
``text`` is pasted as is using the OS paste shortcut (Ctrl+V for Windows/Linux, Cmd+V
for OS X). Note that `paste()` does NOT use special formatting like `type()`.
"""
target = None
text = ''
if len(args) == 1 and isinstance(args[0], basestring):
text = args[0] # depends on [control=['if'], data=[]]
elif len(args) == 2 and isinstance(args[1], basestring):
self.click(target)
text = args[1] # depends on [control=['if'], data=[]]
else:
raise TypeError('paste method expected [PSMRL], text')
pyperclip.copy(text)
# Triggers OS paste for foreground window
PlatformManager.osPaste()
time.sleep(0.2) |
def step3(expnums, ccd, version, rate_min,
          rate_max, angle, width, field=None, prefix=None, dry_run=False,
          maximum_flux_ratio=3, minimum_area=5, minimum_median_flux=1000.0):
    """Run the actual step3 (moving-source search) on the given exposure/CCD combo.

    Fetches the per-exposure 'unid' detection files, runs the external
    ``step3jmp`` and ``step3jjk`` programs over the set of exposures, and
    (unless ``dry_run``) copies the resulting ``moving.jmp``/``moving.matt``
    output files back to storage under ``field``.

    :param expnums: sequence of exposure numbers searched together as one set
    :param ccd: CCD (chip) number within each exposure
    :param version: processing version tag used when building names/URIs
    :param rate_min: minimum rate of motion to search (``-rn``)
    :param rate_max: maximum rate of motion to search (``-rx``)
    :param angle: angle of motion to search (``-a``)
    :param width: width of the search around ``angle`` (``-w``)
    :param field: label used for the output URI; defaults to the first exposure number
    :param prefix: optional filename prefix.  NOTE(review): if ``None`` is passed,
        the ``'%s'`` in the output filename below renders it as the literal
        string ``'None'`` -- confirm callers always pass ``''`` instead.
    :param dry_run: when True, run the searches but do not copy results to storage
    :param maximum_flux_ratio: ``-fr`` argument (passed to step3jjk only)
    :param minimum_area: ``-ma`` argument (passed to step3jjk only)
    :param minimum_median_flux: ``-mf`` argument (passed to step3jjk only)
    """
    jmp_args = ['step3jmp']
    matt_args = ['step3jjk']
    idx = 0
    cmd_args = []
    for expnum in expnums:
        idx += 1
        # Make sure both flavours of the 'unidentified source' lists are local.
        for ext in ['unid.jmp', 'unid.matt']:
            storage.get_file(expnum, ccd=ccd, version=version, ext=ext, prefix=prefix)
        image = os.path.splitext(os.path.basename(storage.get_uri(expnum, ccd, version=version, prefix=prefix)))[0]
        # Exposures are handed to the search programs as -f1 <img1> -f2 <img2> ...
        cmd_args.append('-f%d' % idx)
        cmd_args.append(image)
    # Rate/angle search window shared by both programs.
    cmd_args.extend(['-rn', str(rate_min),
                     '-rx', str(rate_max),
                     '-a', str(angle),
                     '-w', str(width)])
    jmp_args.extend(cmd_args)
    # Add some extra arguments for the ISO search (accepted by step3jjk only).
    cmd_args.extend(['-fr', str(maximum_flux_ratio),
                     '-ma', str(minimum_area),
                     '-mf', str(minimum_median_flux)])
    matt_args.extend(cmd_args)
    logging.info(util.exec_prog(jmp_args))
    logging.info(util.exec_prog(matt_args))
    if dry_run:
        return
    if field is None:
        field = str(expnums[0])
    # Ensure the destination directory exists before copying results up.
    storage.mkdir(os.path.dirname(storage.get_uri(field,
                                                  ccd=ccd,
                                                  version=version,
                                                  prefix=prefix)))
    for ext in ['moving.jmp', 'moving.matt']:
        uri = storage.get_uri(field,
                              ccd=ccd,
                              version=version,
                              ext=ext,
                              prefix=prefix)
        # Local output name written by the search programs for this set.
        filename = '%s%d%s%s.%s' % (prefix, expnums[0],
                                    version,
                                    str(ccd).zfill(2),
                                    ext)
        storage.copy(filename, uri)
    return
constant[run the actual step3 on the given exp/ccd combo]
variable[jmp_args] assign[=] list[[<ast.Constant object at 0x7da1b1a48940>]]
variable[matt_args] assign[=] list[[<ast.Constant object at 0x7da1b1a49e10>]]
variable[idx] assign[=] constant[0]
variable[cmd_args] assign[=] list[[]]
for taget[name[expnum]] in starred[name[expnums]] begin[:]
<ast.AugAssign object at 0x7da1b1a49840>
for taget[name[ext]] in starred[list[[<ast.Constant object at 0x7da1b1a4ac20>, <ast.Constant object at 0x7da1b1a49630>]]] begin[:]
call[name[storage].get_file, parameter[name[expnum]]]
variable[image] assign[=] call[call[name[os].path.splitext, parameter[call[name[os].path.basename, parameter[call[name[storage].get_uri, parameter[name[expnum], name[ccd]]]]]]]][constant[0]]
call[name[cmd_args].append, parameter[binary_operation[constant[-f%d] <ast.Mod object at 0x7da2590d6920> name[idx]]]]
call[name[cmd_args].append, parameter[name[image]]]
call[name[cmd_args].extend, parameter[list[[<ast.Constant object at 0x7da1b1b0c190>, <ast.Call object at 0x7da1b1b0dc30>, <ast.Constant object at 0x7da1b1b0c880>, <ast.Call object at 0x7da1b1b0f940>, <ast.Constant object at 0x7da1b1b0c8e0>, <ast.Call object at 0x7da1b1b0f430>, <ast.Constant object at 0x7da1b1b0e1d0>, <ast.Call object at 0x7da1b1b0cac0>]]]]
call[name[jmp_args].extend, parameter[name[cmd_args]]]
call[name[cmd_args].extend, parameter[list[[<ast.Constant object at 0x7da1b1b0c640>, <ast.Call object at 0x7da1b1b0c580>, <ast.Constant object at 0x7da1b1b0eaa0>, <ast.Call object at 0x7da1b1b0ee60>, <ast.Constant object at 0x7da1b1b0dff0>, <ast.Call object at 0x7da1b191cfd0>]]]]
call[name[matt_args].extend, parameter[name[cmd_args]]]
call[name[logging].info, parameter[call[name[util].exec_prog, parameter[name[jmp_args]]]]]
call[name[logging].info, parameter[call[name[util].exec_prog, parameter[name[matt_args]]]]]
if name[dry_run] begin[:]
return[None]
if compare[name[field] is constant[None]] begin[:]
variable[field] assign[=] call[name[str], parameter[call[name[expnums]][constant[0]]]]
call[name[storage].mkdir, parameter[call[name[os].path.dirname, parameter[call[name[storage].get_uri, parameter[name[field]]]]]]]
for taget[name[ext]] in starred[list[[<ast.Constant object at 0x7da1b191f520>, <ast.Constant object at 0x7da1b191dae0>]]] begin[:]
variable[uri] assign[=] call[name[storage].get_uri, parameter[name[field]]]
variable[filename] assign[=] binary_operation[constant[%s%d%s%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b191cb80>, <ast.Subscript object at 0x7da1b191c1c0>, <ast.Name object at 0x7da1b191c7c0>, <ast.Call object at 0x7da1b191f460>, <ast.Name object at 0x7da1b191d990>]]]
call[name[storage].copy, parameter[name[filename], name[uri]]]
return[None] | keyword[def] identifier[step3] ( identifier[expnums] , identifier[ccd] , identifier[version] , identifier[rate_min] ,
identifier[rate_max] , identifier[angle] , identifier[width] , identifier[field] = keyword[None] , identifier[prefix] = keyword[None] , identifier[dry_run] = keyword[False] ,
identifier[maximum_flux_ratio] = literal[int] , identifier[minimum_area] = literal[int] , identifier[minimum_median_flux] = literal[int] ):
literal[string]
identifier[jmp_args] =[ literal[string] ]
identifier[matt_args] =[ literal[string] ]
identifier[idx] = literal[int]
identifier[cmd_args] =[]
keyword[for] identifier[expnum] keyword[in] identifier[expnums] :
identifier[idx] += literal[int]
keyword[for] identifier[ext] keyword[in] [ literal[string] , literal[string] ]:
identifier[storage] . identifier[get_file] ( identifier[expnum] , identifier[ccd] = identifier[ccd] , identifier[version] = identifier[version] , identifier[ext] = identifier[ext] , identifier[prefix] = identifier[prefix] )
identifier[image] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[storage] . identifier[get_uri] ( identifier[expnum] , identifier[ccd] , identifier[version] = identifier[version] , identifier[prefix] = identifier[prefix] )))[ literal[int] ]
identifier[cmd_args] . identifier[append] ( literal[string] % identifier[idx] )
identifier[cmd_args] . identifier[append] ( identifier[image] )
identifier[cmd_args] . identifier[extend] ([ literal[string] , identifier[str] ( identifier[rate_min] ),
literal[string] , identifier[str] ( identifier[rate_max] ),
literal[string] , identifier[str] ( identifier[angle] ),
literal[string] , identifier[str] ( identifier[width] )])
identifier[jmp_args] . identifier[extend] ( identifier[cmd_args] )
identifier[cmd_args] . identifier[extend] ([ literal[string] , identifier[str] ( identifier[maximum_flux_ratio] ),
literal[string] , identifier[str] ( identifier[minimum_area] ),
literal[string] , identifier[str] ( identifier[minimum_median_flux] )])
identifier[matt_args] . identifier[extend] ( identifier[cmd_args] )
identifier[logging] . identifier[info] ( identifier[util] . identifier[exec_prog] ( identifier[jmp_args] ))
identifier[logging] . identifier[info] ( identifier[util] . identifier[exec_prog] ( identifier[matt_args] ))
keyword[if] identifier[dry_run] :
keyword[return]
keyword[if] identifier[field] keyword[is] keyword[None] :
identifier[field] = identifier[str] ( identifier[expnums] [ literal[int] ])
identifier[storage] . identifier[mkdir] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[storage] . identifier[get_uri] ( identifier[field] ,
identifier[ccd] = identifier[ccd] ,
identifier[version] = identifier[version] ,
identifier[prefix] = identifier[prefix] )))
keyword[for] identifier[ext] keyword[in] [ literal[string] , literal[string] ]:
identifier[uri] = identifier[storage] . identifier[get_uri] ( identifier[field] ,
identifier[ccd] = identifier[ccd] ,
identifier[version] = identifier[version] ,
identifier[ext] = identifier[ext] ,
identifier[prefix] = identifier[prefix] )
identifier[filename] = literal[string] %( identifier[prefix] , identifier[expnums] [ literal[int] ],
identifier[version] ,
identifier[str] ( identifier[ccd] ). identifier[zfill] ( literal[int] ),
identifier[ext] )
identifier[storage] . identifier[copy] ( identifier[filename] , identifier[uri] )
keyword[return] | def step3(expnums, ccd, version, rate_min, rate_max, angle, width, field=None, prefix=None, dry_run=False, maximum_flux_ratio=3, minimum_area=5, minimum_median_flux=1000.0):
"""run the actual step3 on the given exp/ccd combo"""
jmp_args = ['step3jmp']
matt_args = ['step3jjk']
idx = 0
cmd_args = []
for expnum in expnums:
idx += 1
for ext in ['unid.jmp', 'unid.matt']:
storage.get_file(expnum, ccd=ccd, version=version, ext=ext, prefix=prefix) # depends on [control=['for'], data=['ext']]
image = os.path.splitext(os.path.basename(storage.get_uri(expnum, ccd, version=version, prefix=prefix)))[0]
cmd_args.append('-f%d' % idx)
cmd_args.append(image) # depends on [control=['for'], data=['expnum']]
cmd_args.extend(['-rn', str(rate_min), '-rx', str(rate_max), '-a', str(angle), '-w', str(width)])
jmp_args.extend(cmd_args)
# Add some extra arguemnents for the ISO search.
cmd_args.extend(['-fr', str(maximum_flux_ratio), '-ma', str(minimum_area), '-mf', str(minimum_median_flux)])
matt_args.extend(cmd_args)
logging.info(util.exec_prog(jmp_args))
logging.info(util.exec_prog(matt_args))
if dry_run:
return # depends on [control=['if'], data=[]]
if field is None:
field = str(expnums[0]) # depends on [control=['if'], data=['field']]
storage.mkdir(os.path.dirname(storage.get_uri(field, ccd=ccd, version=version, prefix=prefix)))
for ext in ['moving.jmp', 'moving.matt']:
uri = storage.get_uri(field, ccd=ccd, version=version, ext=ext, prefix=prefix)
filename = '%s%d%s%s.%s' % (prefix, expnums[0], version, str(ccd).zfill(2), ext)
storage.copy(filename, uri) # depends on [control=['for'], data=['ext']]
return |
def SPI_write(self, chip_select, data):
    """Write ``data`` to the SPI device selected by the chip-select bit.

    The chip-select value is prepended to the payload and the whole
    message is written as one I2C block to ``self.address``.
    """
    payload = [chip_select] + list(data)
    return self.bus.write_i2c_block(self.address, payload)
constant[Writes data to SPI device selected by chipselect bit. ]
variable[dat] assign[=] call[name[list], parameter[name[data]]]
call[name[dat].insert, parameter[constant[0], name[chip_select]]]
return[call[name[self].bus.write_i2c_block, parameter[name[self].address, name[dat]]]] | keyword[def] identifier[SPI_write] ( identifier[self] , identifier[chip_select] , identifier[data] ):
literal[string]
identifier[dat] = identifier[list] ( identifier[data] )
identifier[dat] . identifier[insert] ( literal[int] , identifier[chip_select] )
keyword[return] identifier[self] . identifier[bus] . identifier[write_i2c_block] ( identifier[self] . identifier[address] , identifier[dat] ); | def SPI_write(self, chip_select, data):
"""Writes data to SPI device selected by chipselect bit. """
dat = list(data)
dat.insert(0, chip_select)
return self.bus.write_i2c_block(self.address, dat) |
def listar_por_marca(self, id_brand):
    """List all Model by Brand.

    :param id_brand: Identifier of the Brand. Integer value and greater than zero.

    :return: Dictionary with the following structure:

        ::

            {'model': [{'id': < id >,
            'nome': < nome >,
            'id_marca': < id_marca >}, ... too Model ...]}

    :raise InvalidParameterError: The identifier of Brand is null and invalid.
    :raise MarcaNaoExisteError: Brand not registered.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response
    """
    # Validate before hitting the API: the brand id must be a positive integer.
    if not is_valid_int_param(id_brand):
        raise InvalidParameterError(
            u'The identifier of Brand is invalid or was not informed.')

    url = 'model/brand/' + str(id_brand) + '/'

    # Named 'xml_map' (not 'map') so the builtin is not shadowed.
    code, xml_map = self.submit(None, 'GET', url)

    key = 'model'
    return get_list_map(self.response(code, xml_map, [key]), key)
constant[List all Model by Brand.
:param id_brand: Identifier of the Brand. Integer value and greater than zero.
:return: Dictionary with the following structure:
::
{‘model’: [{‘id’: < id >,
‘nome’: < nome >,
‘id_marca’: < id_marca >}, ... too Model ...]}
:raise InvalidParameterError: The identifier of Brand is null and invalid.
:raise MarcaNaoExisteError: Brand not registered.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response
]
if <ast.UnaryOp object at 0x7da1b23458a0> begin[:]
<ast.Raise object at 0x7da1b2347bb0>
variable[url] assign[=] binary_operation[binary_operation[constant[model/brand/] + call[name[str], parameter[name[id_brand]]]] + constant[/]]
<ast.Tuple object at 0x7da1b2344610> assign[=] call[name[self].submit, parameter[constant[None], constant[GET], name[url]]]
variable[key] assign[=] constant[model]
return[call[name[get_list_map], parameter[call[name[self].response, parameter[name[code], name[map], list[[<ast.Name object at 0x7da20c7c81f0>]]]], name[key]]]] | keyword[def] identifier[listar_por_marca] ( identifier[self] , identifier[id_brand] ):
literal[string]
keyword[if] keyword[not] identifier[is_valid_int_param] ( identifier[id_brand] ):
keyword[raise] identifier[InvalidParameterError] (
literal[string] )
identifier[url] = literal[string] + identifier[str] ( identifier[id_brand] )+ literal[string]
identifier[code] , identifier[map] = identifier[self] . identifier[submit] ( keyword[None] , literal[string] , identifier[url] )
identifier[key] = literal[string]
keyword[return] identifier[get_list_map] ( identifier[self] . identifier[response] ( identifier[code] , identifier[map] ,[ identifier[key] ]), identifier[key] ) | def listar_por_marca(self, id_brand):
"""List all Model by Brand.
:param id_brand: Identifier of the Brand. Integer value and greater than zero.
:return: Dictionary with the following structure:
::
{‘model’: [{‘id’: < id >,
‘nome’: < nome >,
‘id_marca’: < id_marca >}, ... too Model ...]}
:raise InvalidParameterError: The identifier of Brand is null and invalid.
:raise MarcaNaoExisteError: Brand not registered.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response
"""
if not is_valid_int_param(id_brand):
raise InvalidParameterError(u'The identifier of Brand is invalid or was not informed.') # depends on [control=['if'], data=[]]
url = 'model/brand/' + str(id_brand) + '/'
(code, map) = self.submit(None, 'GET', url)
key = 'model'
return get_list_map(self.response(code, map, [key]), key) |
def _filter_response(self, response_dict):
""" Add additional filters to the response dictionary
Currently the response dictionary is filtered like this:
* If a list only has one item, the list is replaced by that item
* Namespace-Keys (_jsns and xmlns) are removed
:param response_dict: the pregenerated, but unfiltered response dict
:type response_dict: dict
:return: The filtered dictionary
:rtype: dict
"""
filtered_dict = {}
for key, value in response_dict.items():
if key == "_jsns":
continue
if key == "xmlns":
continue
if type(value) == list and len(value) == 1:
filtered_dict[key] = value[0]
elif type(value) == dict and len(value.keys()) == 1 and "_content" \
in value.keys():
filtered_dict[key] = value["_content"]
elif type(value) == dict:
tmp_dict = self._filter_response(value)
filtered_dict[key] = tmp_dict
else:
filtered_dict[key] = value
return filtered_dict | def function[_filter_response, parameter[self, response_dict]]:
constant[ Add additional filters to the response dictionary
Currently the response dictionary is filtered like this:
* If a list only has one item, the list is replaced by that item
* Namespace-Keys (_jsns and xmlns) are removed
:param response_dict: the pregenerated, but unfiltered response dict
:type response_dict: dict
:return: The filtered dictionary
:rtype: dict
]
variable[filtered_dict] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da207f9a800>, <ast.Name object at 0x7da207f98e20>]]] in starred[call[name[response_dict].items, parameter[]]] begin[:]
if compare[name[key] equal[==] constant[_jsns]] begin[:]
continue
if compare[name[key] equal[==] constant[xmlns]] begin[:]
continue
if <ast.BoolOp object at 0x7da207f9b010> begin[:]
call[name[filtered_dict]][name[key]] assign[=] call[name[value]][constant[0]]
return[name[filtered_dict]] | keyword[def] identifier[_filter_response] ( identifier[self] , identifier[response_dict] ):
literal[string]
identifier[filtered_dict] ={}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[response_dict] . identifier[items] ():
keyword[if] identifier[key] == literal[string] :
keyword[continue]
keyword[if] identifier[key] == literal[string] :
keyword[continue]
keyword[if] identifier[type] ( identifier[value] )== identifier[list] keyword[and] identifier[len] ( identifier[value] )== literal[int] :
identifier[filtered_dict] [ identifier[key] ]= identifier[value] [ literal[int] ]
keyword[elif] identifier[type] ( identifier[value] )== identifier[dict] keyword[and] identifier[len] ( identifier[value] . identifier[keys] ())== literal[int] keyword[and] literal[string] keyword[in] identifier[value] . identifier[keys] ():
identifier[filtered_dict] [ identifier[key] ]= identifier[value] [ literal[string] ]
keyword[elif] identifier[type] ( identifier[value] )== identifier[dict] :
identifier[tmp_dict] = identifier[self] . identifier[_filter_response] ( identifier[value] )
identifier[filtered_dict] [ identifier[key] ]= identifier[tmp_dict]
keyword[else] :
identifier[filtered_dict] [ identifier[key] ]= identifier[value]
keyword[return] identifier[filtered_dict] | def _filter_response(self, response_dict):
""" Add additional filters to the response dictionary
Currently the response dictionary is filtered like this:
* If a list only has one item, the list is replaced by that item
* Namespace-Keys (_jsns and xmlns) are removed
:param response_dict: the pregenerated, but unfiltered response dict
:type response_dict: dict
:return: The filtered dictionary
:rtype: dict
"""
filtered_dict = {}
for (key, value) in response_dict.items():
if key == '_jsns':
continue # depends on [control=['if'], data=[]]
if key == 'xmlns':
continue # depends on [control=['if'], data=[]]
if type(value) == list and len(value) == 1:
filtered_dict[key] = value[0] # depends on [control=['if'], data=[]]
elif type(value) == dict and len(value.keys()) == 1 and ('_content' in value.keys()):
filtered_dict[key] = value['_content'] # depends on [control=['if'], data=[]]
elif type(value) == dict:
tmp_dict = self._filter_response(value)
filtered_dict[key] = tmp_dict # depends on [control=['if'], data=[]]
else:
filtered_dict[key] = value # depends on [control=['for'], data=[]]
return filtered_dict |
def check_pre_requirements(pre_requirements):
    """Check all necessary system requirements to exist.

    :param pre_requirements:
        Sequence of pre-requirements to check by running
        ``where <pre_requirement>`` on Windows and ``which ...`` elsewhere.
    """
    # 'virtualenv' is always required, on top of whatever the caller asks for.
    required = set(pre_requirements or [])
    required.add('virtualenv')

    for tool in required:
        if which(tool):
            continue
        print_error('Requirement {0!r} is not found in system'.format(tool))
        return False
    return True
constant[Check all necessary system requirements to exist.
:param pre_requirements:
Sequence of pre-requirements to check by running
``where <pre_requirement>`` on Windows and ``which ...`` elsewhere.
]
variable[pre_requirements] assign[=] call[name[set], parameter[<ast.BoolOp object at 0x7da1b008ce20>]]
call[name[pre_requirements].add, parameter[constant[virtualenv]]]
for taget[name[requirement]] in starred[name[pre_requirements]] begin[:]
if <ast.UnaryOp object at 0x7da1b008ce80> begin[:]
call[name[print_error], parameter[call[constant[Requirement {0!r} is not found in system].format, parameter[name[requirement]]]]]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[check_pre_requirements] ( identifier[pre_requirements] ):
literal[string]
identifier[pre_requirements] = identifier[set] ( identifier[pre_requirements] keyword[or] [])
identifier[pre_requirements] . identifier[add] ( literal[string] )
keyword[for] identifier[requirement] keyword[in] identifier[pre_requirements] :
keyword[if] keyword[not] identifier[which] ( identifier[requirement] ):
identifier[print_error] ( literal[string] .
identifier[format] ( identifier[requirement] ))
keyword[return] keyword[False]
keyword[return] keyword[True] | def check_pre_requirements(pre_requirements):
"""Check all necessary system requirements to exist.
:param pre_requirements:
Sequence of pre-requirements to check by running
``where <pre_requirement>`` on Windows and ``which ...`` elsewhere.
"""
pre_requirements = set(pre_requirements or [])
pre_requirements.add('virtualenv')
for requirement in pre_requirements:
if not which(requirement):
print_error('Requirement {0!r} is not found in system'.format(requirement))
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['requirement']]
return True |
def get_function_id(sig):
    """Return the function id of the given signature.

    The id is the first four bytes of the Keccak-256 hash of the
    signature, interpreted as a big-endian hex integer.
    NOTE(review): this matches the Ethereum ABI 4-byte function-selector
    construction -- confirm against callers.

    Args:
        sig (str): canonical function signature

    Return:
        (int)
    """
    digest = sha3.keccak_256()
    digest.update(sig.encode('utf-8'))
    # First 8 hex characters == first 4 bytes of the digest; the '0x'
    # prefix is redundant when an explicit base is given to int().
    return int(digest.hexdigest()[:8], 16)
constant['
Return the function id of the given signature
Args:
sig (str)
Return:
(int)
]
variable[s] assign[=] call[name[sha3].keccak_256, parameter[]]
call[name[s].update, parameter[call[name[sig].encode, parameter[constant[utf-8]]]]]
return[call[name[int], parameter[binary_operation[constant[0x] + call[call[name[s].hexdigest, parameter[]]][<ast.Slice object at 0x7da18f8118d0>]], constant[16]]]] | keyword[def] identifier[get_function_id] ( identifier[sig] ):
literal[string]
identifier[s] = identifier[sha3] . identifier[keccak_256] ()
identifier[s] . identifier[update] ( identifier[sig] . identifier[encode] ( literal[string] ))
keyword[return] identifier[int] ( literal[string] + identifier[s] . identifier[hexdigest] ()[: literal[int] ], literal[int] ) | def get_function_id(sig):
"""'
Return the function id of the given signature
Args:
sig (str)
Return:
(int)
"""
s = sha3.keccak_256()
s.update(sig.encode('utf-8'))
return int('0x' + s.hexdigest()[:8], 16) |
def recent(self, check_language=True, language=None, limit=3, exclude=None,
           kwargs=None, category=None):
    """
    Returns recently published new entries.

    :param check_language: forwarded to ``published``
    :param language: forwarded to ``published``
    :param limit: maximum number of entries to return
    :param exclude: an entry instance to leave out of the result
    :param kwargs: extra filter kwargs forwarded to ``published``
        (never mutated)
    :param category: when given, restrict the result to this category
    """
    if category:
        # Work on a copy so the caller's dict is not mutated as a side effect.
        kwargs = dict(kwargs) if kwargs else {}
        kwargs['categories__in'] = [category]
    qs = self.published(check_language=check_language, language=language,
                        kwargs=kwargs)
    if exclude:
        qs = qs.exclude(pk=exclude.pk)
    return qs[:limit]
constant[
Returns recently published new entries.
]
if name[category] begin[:]
if <ast.UnaryOp object at 0x7da1b0e601c0> begin[:]
variable[kwargs] assign[=] dictionary[[], []]
call[name[kwargs]][constant[categories__in]] assign[=] list[[<ast.Name object at 0x7da1b0e61600>]]
variable[qs] assign[=] call[name[self].published, parameter[]]
if name[exclude] begin[:]
variable[qs] assign[=] call[name[qs].exclude, parameter[]]
return[call[name[qs]][<ast.Slice object at 0x7da1b0e614b0>]] | keyword[def] identifier[recent] ( identifier[self] , identifier[check_language] = keyword[True] , identifier[language] = keyword[None] , identifier[limit] = literal[int] , identifier[exclude] = keyword[None] ,
identifier[kwargs] = keyword[None] , identifier[category] = keyword[None] ):
literal[string]
keyword[if] identifier[category] :
keyword[if] keyword[not] identifier[kwargs] :
identifier[kwargs] ={}
identifier[kwargs] [ literal[string] ]=[ identifier[category] ]
identifier[qs] = identifier[self] . identifier[published] ( identifier[check_language] = identifier[check_language] , identifier[language] = identifier[language] ,
identifier[kwargs] = identifier[kwargs] )
keyword[if] identifier[exclude] :
identifier[qs] = identifier[qs] . identifier[exclude] ( identifier[pk] = identifier[exclude] . identifier[pk] )
keyword[return] identifier[qs] [: identifier[limit] ] | def recent(self, check_language=True, language=None, limit=3, exclude=None, kwargs=None, category=None):
"""
Returns recently published new entries.
"""
if category:
if not kwargs:
kwargs = {} # depends on [control=['if'], data=[]]
kwargs['categories__in'] = [category] # depends on [control=['if'], data=[]]
qs = self.published(check_language=check_language, language=language, kwargs=kwargs)
if exclude:
qs = qs.exclude(pk=exclude.pk) # depends on [control=['if'], data=[]]
return qs[:limit] |
def parse_verbosity(self, args):
'''parse_verbosity will take an argument object, and return the args
passed (from a dictionary) to a list
Parameters
==========
args: the argparse argument objects
'''
flags = []
if args.silent is True:
flags.append('--silent')
elif args.quiet is True:
flags.append('--quiet')
elif args.debug is True:
flags.append('--debug')
elif args.verbose is True:
flags.append('-' + 'v' * args.verbose)
return flags | def function[parse_verbosity, parameter[self, args]]:
constant[parse_verbosity will take an argument object, and return the args
passed (from a dictionary) to a list
Parameters
==========
args: the argparse argument objects
]
variable[flags] assign[=] list[[]]
if compare[name[args].silent is constant[True]] begin[:]
call[name[flags].append, parameter[constant[--silent]]]
return[name[flags]] | keyword[def] identifier[parse_verbosity] ( identifier[self] , identifier[args] ):
literal[string]
identifier[flags] =[]
keyword[if] identifier[args] . identifier[silent] keyword[is] keyword[True] :
identifier[flags] . identifier[append] ( literal[string] )
keyword[elif] identifier[args] . identifier[quiet] keyword[is] keyword[True] :
identifier[flags] . identifier[append] ( literal[string] )
keyword[elif] identifier[args] . identifier[debug] keyword[is] keyword[True] :
identifier[flags] . identifier[append] ( literal[string] )
keyword[elif] identifier[args] . identifier[verbose] keyword[is] keyword[True] :
identifier[flags] . identifier[append] ( literal[string] + literal[string] * identifier[args] . identifier[verbose] )
keyword[return] identifier[flags] | def parse_verbosity(self, args):
"""parse_verbosity will take an argument object, and return the args
passed (from a dictionary) to a list
Parameters
==========
args: the argparse argument objects
"""
flags = []
if args.silent is True:
flags.append('--silent') # depends on [control=['if'], data=[]]
elif args.quiet is True:
flags.append('--quiet') # depends on [control=['if'], data=[]]
elif args.debug is True:
flags.append('--debug') # depends on [control=['if'], data=[]]
elif args.verbose is True:
flags.append('-' + 'v' * args.verbose) # depends on [control=['if'], data=[]]
return flags |
def country(random=random, *args, **kwargs):
    """
    Produce a country name

    >>> mock_random.seed(0)
    >>> country(random=mock_random)
    'testasia'
    >>> country(random=mock_random, capitalize=True)
    'West Xanth'
    >>> country(random=mock_random, slugify=True)
    'westeros'
    """
    # Pick a template first, then fill in both placeholders (direction is
    # generated even when the plain template is chosen, as before).
    template = random.choice(["{country}", "{direction} {country}"])
    return template.format(
        country=random.choice(countries),
        direction=direction(random=random),
    )
constant[
Produce a country name
>>> mock_random.seed(0)
>>> country(random=mock_random)
'testasia'
>>> country(random=mock_random, capitalize=True)
'West Xanth'
>>> country(random=mock_random, slugify=True)
'westeros'
]
return[call[call[name[random].choice, parameter[list[[<ast.Constant object at 0x7da1b0ca54e0>, <ast.Constant object at 0x7da1b0ca53f0>]]]].format, parameter[]]] | keyword[def] identifier[country] ( identifier[random] = identifier[random] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[random] . identifier[choice] ([
literal[string] ,
literal[string]
]). identifier[format] ( identifier[country] = identifier[random] . identifier[choice] ( identifier[countries] ),
identifier[direction] = identifier[direction] ( identifier[random] = identifier[random] )) | def country(random=random, *args, **kwargs):
"""
Produce a country name
>>> mock_random.seed(0)
>>> country(random=mock_random)
'testasia'
>>> country(random=mock_random, capitalize=True)
'West Xanth'
>>> country(random=mock_random, slugify=True)
'westeros'
"""
return random.choice(['{country}', '{direction} {country}']).format(country=random.choice(countries), direction=direction(random=random)) |
def get_binom(base1, base2, estE, estH):
    """
    Return probability of base call.

    Compares a heterozygous model against the two homozygous models for a
    site with observed counts ``base1``/``base2``.

    :param base1: observed count of the first base
    :param base2: observed count of the second base
    :param estE: estimated sequencing error rate
    :param estH: estimated heterozygosity (prior weight of the het model)
    :return: tuple ``(is_het, prob)`` -- ``is_het`` is True when the
        heterozygous model outweighs the first homozygous model, and
        ``prob`` is the normalized probability of the best model.
    """
    # scipy.misc.comb was removed from SciPy; scipy.special.comb is the
    # supported equivalent.
    from scipy.special import comb
    from scipy.stats import binom

    ## priors: remaining probability mass split between the two homozygotes
    prior_homo = (1. - estH) / 2.
    prior_hete = estH
    ## calculate likelihoods
    bsum = base1 + base2
    hetprob = comb(bsum, base1) / (2. ** bsum)
    homoa = binom.pmf(base2, bsum, estE)
    homob = binom.pmf(base1, bsum, estE)
    ## weight by priors (unnormalized posteriors)
    hetprob *= prior_hete
    homoa *= prior_homo
    homob *= prior_homo
    ## normalize the best model against the sum of all three
    probabilities = [homoa, homob, hetprob]
    bestprob = max(probabilities) / float(sum(probabilities))
    ## return
    if hetprob > homoa:
        return True, bestprob
    else:
        return False, bestprob
constant[
return probability of base call
]
variable[prior_homo] assign[=] binary_operation[binary_operation[constant[1.0] - name[estH]] / constant[2.0]]
variable[prior_hete] assign[=] name[estH]
variable[bsum] assign[=] binary_operation[name[base1] + name[base2]]
variable[hetprob] assign[=] binary_operation[call[name[scipy].misc.comb, parameter[name[bsum], name[base1]]] / binary_operation[constant[2.0] ** name[bsum]]]
variable[homoa] assign[=] call[name[scipy].stats.binom.pmf, parameter[name[base2], name[bsum], name[estE]]]
variable[homob] assign[=] call[name[scipy].stats.binom.pmf, parameter[name[base1], name[bsum], name[estE]]]
<ast.AugAssign object at 0x7da18dc9af80>
<ast.AugAssign object at 0x7da18dc99c30>
<ast.AugAssign object at 0x7da18dc982b0>
variable[probabilities] assign[=] list[[<ast.Name object at 0x7da18dc9a050>, <ast.Name object at 0x7da18dc9b0d0>, <ast.Name object at 0x7da18dc99150>]]
variable[bestprob] assign[=] binary_operation[call[name[max], parameter[name[probabilities]]] / call[name[float], parameter[call[name[sum], parameter[name[probabilities]]]]]]
if compare[name[hetprob] greater[>] name[homoa]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b004f670>, <ast.Name object at 0x7da1b004f610>]]] | keyword[def] identifier[get_binom] ( identifier[base1] , identifier[base2] , identifier[estE] , identifier[estH] ):
literal[string]
identifier[prior_homo] =( literal[int] - identifier[estH] )/ literal[int]
identifier[prior_hete] = identifier[estH]
identifier[bsum] = identifier[base1] + identifier[base2]
identifier[hetprob] = identifier[scipy] . identifier[misc] . identifier[comb] ( identifier[bsum] , identifier[base1] )/( literal[int] **( identifier[bsum] ))
identifier[homoa] = identifier[scipy] . identifier[stats] . identifier[binom] . identifier[pmf] ( identifier[base2] , identifier[bsum] , identifier[estE] )
identifier[homob] = identifier[scipy] . identifier[stats] . identifier[binom] . identifier[pmf] ( identifier[base1] , identifier[bsum] , identifier[estE] )
identifier[hetprob] *= identifier[prior_hete]
identifier[homoa] *= identifier[prior_homo]
identifier[homob] *= identifier[prior_homo]
identifier[probabilities] =[ identifier[homoa] , identifier[homob] , identifier[hetprob] ]
identifier[bestprob] = identifier[max] ( identifier[probabilities] )/ identifier[float] ( identifier[sum] ( identifier[probabilities] ))
keyword[if] identifier[hetprob] > identifier[homoa] :
keyword[return] keyword[True] , identifier[bestprob]
keyword[else] :
keyword[return] keyword[False] , identifier[bestprob] | def get_binom(base1, base2, estE, estH):
"""
return probability of base call
"""
prior_homo = (1.0 - estH) / 2.0
prior_hete = estH
## calculate probs
bsum = base1 + base2
hetprob = scipy.misc.comb(bsum, base1) / 2.0 ** bsum
homoa = scipy.stats.binom.pmf(base2, bsum, estE)
homob = scipy.stats.binom.pmf(base1, bsum, estE)
## calculate probs
hetprob *= prior_hete
homoa *= prior_homo
homob *= prior_homo ## final
probabilities = [homoa, homob, hetprob]
bestprob = max(probabilities) / float(sum(probabilities))
## return
if hetprob > homoa:
return (True, bestprob) # depends on [control=['if'], data=[]]
else:
return (False, bestprob) |
def get_vault_admin_session(self):
"""Gets the OsidSession associated with the vault administration service.
return: (osid.authorization.VaultAdminSession) - a
``VaultAdminSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_vault_admin() is false``
*compliance: optional -- This method must be implemented if
``supports_vault_admin()`` is true.*
"""
if not self.supports_vault_admin():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.VaultAdminSession(runtime=self._runtime) | def function[get_vault_admin_session, parameter[self]]:
constant[Gets the OsidSession associated with the vault administration service.
return: (osid.authorization.VaultAdminSession) - a
``VaultAdminSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_vault_admin() is false``
*compliance: optional -- This method must be implemented if
``supports_vault_admin()`` is true.*
]
if <ast.UnaryOp object at 0x7da18dc04df0> begin[:]
<ast.Raise object at 0x7da18dc05ff0>
return[call[name[sessions].VaultAdminSession, parameter[]]] | keyword[def] identifier[get_vault_admin_session] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[supports_vault_admin] ():
keyword[raise] identifier[errors] . identifier[Unimplemented] ()
keyword[return] identifier[sessions] . identifier[VaultAdminSession] ( identifier[runtime] = identifier[self] . identifier[_runtime] ) | def get_vault_admin_session(self):
"""Gets the OsidSession associated with the vault administration service.
return: (osid.authorization.VaultAdminSession) - a
``VaultAdminSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_vault_admin() is false``
*compliance: optional -- This method must be implemented if
``supports_vault_admin()`` is true.*
"""
if not self.supports_vault_admin():
raise errors.Unimplemented() # depends on [control=['if'], data=[]]
# pylint: disable=no-member
return sessions.VaultAdminSession(runtime=self._runtime) |
def BatchRenorm(x, rmax, dmax, momentum=0.9, epsilon=1e-5,
center=True, scale=True, gamma_initializer=None,
data_format='channels_last'):
"""
Batch Renormalization layer, as described in the paper:
`Batch Renormalization: Towards Reducing Minibatch Dependence in Batch-Normalized Models
<https://arxiv.org/abs/1702.03275>`_.
This implementation is a wrapper around `tf.layers.batch_normalization`.
Args:
x (tf.Tensor): a NHWC or NC tensor.
rmax, dmax (tf.Tensor): a scalar tensor, the maximum allowed corrections.
decay (float): decay rate of moving average.
epsilon (float): epsilon to avoid divide-by-zero.
use_scale, use_bias (bool): whether to use the extra affine transformation or not.
Returns:
tf.Tensor: a tensor named ``output`` with the same shape of x.
Variable Names:
* ``beta``: the bias term.
* ``gamma``: the scale term. Input will be transformed by ``x * gamma + beta``.
* ``moving_mean, renorm_mean, renorm_mean_weight``: See TF documentation.
* ``moving_variance, renorm_stddev, renorm_stddev_weight``: See TF documentation.
"""
shape = x.get_shape().as_list()
ndims = len(shape)
assert ndims in [2, 4]
if ndims == 2:
data_format = 'channels_first'
ctx = get_current_tower_context()
coll_bk = backup_collection([tf.GraphKeys.UPDATE_OPS])
layer = tf.layers.BatchNormalization(
axis=1 if data_format == 'channels_first' else 3,
momentum=momentum, epsilon=epsilon,
center=center, scale=scale,
renorm=True,
renorm_clipping={
'rmin': 1.0 / rmax,
'rmax': rmax,
'dmax': dmax},
renorm_momentum=0.99,
gamma_initializer=gamma_initializer,
fused=False,
_reuse=tf.get_variable_scope().reuse)
xn = layer.apply(x, training=ctx.is_training, scope=tf.get_variable_scope())
if ctx.is_main_training_tower:
for v in layer.non_trainable_variables:
if isinstance(v, tf.Variable):
tf.add_to_collection(tf.GraphKeys.MODEL_VARIABLES, v)
else:
# only run UPDATE_OPS in the first tower
restore_collection(coll_bk)
if ndims == 2:
xn = tf.squeeze(xn, [1, 2])
ret = tf.identity(xn, name='output')
# TODO not sure whether to add moving_mean/moving_var to VH now
vh = ret.variables = VariableHolder()
if scale:
vh.gamma = layer.gamma
if center:
vh.beta = layer.beta
return ret | def function[BatchRenorm, parameter[x, rmax, dmax, momentum, epsilon, center, scale, gamma_initializer, data_format]]:
constant[
Batch Renormalization layer, as described in the paper:
`Batch Renormalization: Towards Reducing Minibatch Dependence in Batch-Normalized Models
<https://arxiv.org/abs/1702.03275>`_.
This implementation is a wrapper around `tf.layers.batch_normalization`.
Args:
x (tf.Tensor): a NHWC or NC tensor.
rmax, dmax (tf.Tensor): a scalar tensor, the maximum allowed corrections.
decay (float): decay rate of moving average.
epsilon (float): epsilon to avoid divide-by-zero.
use_scale, use_bias (bool): whether to use the extra affine transformation or not.
Returns:
tf.Tensor: a tensor named ``output`` with the same shape of x.
Variable Names:
* ``beta``: the bias term.
* ``gamma``: the scale term. Input will be transformed by ``x * gamma + beta``.
* ``moving_mean, renorm_mean, renorm_mean_weight``: See TF documentation.
* ``moving_variance, renorm_stddev, renorm_stddev_weight``: See TF documentation.
]
variable[shape] assign[=] call[call[name[x].get_shape, parameter[]].as_list, parameter[]]
variable[ndims] assign[=] call[name[len], parameter[name[shape]]]
assert[compare[name[ndims] in list[[<ast.Constant object at 0x7da18f09fdf0>, <ast.Constant object at 0x7da18f09da80>]]]]
if compare[name[ndims] equal[==] constant[2]] begin[:]
variable[data_format] assign[=] constant[channels_first]
variable[ctx] assign[=] call[name[get_current_tower_context], parameter[]]
variable[coll_bk] assign[=] call[name[backup_collection], parameter[list[[<ast.Attribute object at 0x7da18f09dd20>]]]]
variable[layer] assign[=] call[name[tf].layers.BatchNormalization, parameter[]]
variable[xn] assign[=] call[name[layer].apply, parameter[name[x]]]
if name[ctx].is_main_training_tower begin[:]
for taget[name[v]] in starred[name[layer].non_trainable_variables] begin[:]
if call[name[isinstance], parameter[name[v], name[tf].Variable]] begin[:]
call[name[tf].add_to_collection, parameter[name[tf].GraphKeys.MODEL_VARIABLES, name[v]]]
if compare[name[ndims] equal[==] constant[2]] begin[:]
variable[xn] assign[=] call[name[tf].squeeze, parameter[name[xn], list[[<ast.Constant object at 0x7da18f09d630>, <ast.Constant object at 0x7da18f09f3d0>]]]]
variable[ret] assign[=] call[name[tf].identity, parameter[name[xn]]]
variable[vh] assign[=] call[name[VariableHolder], parameter[]]
if name[scale] begin[:]
name[vh].gamma assign[=] name[layer].gamma
if name[center] begin[:]
name[vh].beta assign[=] name[layer].beta
return[name[ret]] | keyword[def] identifier[BatchRenorm] ( identifier[x] , identifier[rmax] , identifier[dmax] , identifier[momentum] = literal[int] , identifier[epsilon] = literal[int] ,
identifier[center] = keyword[True] , identifier[scale] = keyword[True] , identifier[gamma_initializer] = keyword[None] ,
identifier[data_format] = literal[string] ):
literal[string]
identifier[shape] = identifier[x] . identifier[get_shape] (). identifier[as_list] ()
identifier[ndims] = identifier[len] ( identifier[shape] )
keyword[assert] identifier[ndims] keyword[in] [ literal[int] , literal[int] ]
keyword[if] identifier[ndims] == literal[int] :
identifier[data_format] = literal[string]
identifier[ctx] = identifier[get_current_tower_context] ()
identifier[coll_bk] = identifier[backup_collection] ([ identifier[tf] . identifier[GraphKeys] . identifier[UPDATE_OPS] ])
identifier[layer] = identifier[tf] . identifier[layers] . identifier[BatchNormalization] (
identifier[axis] = literal[int] keyword[if] identifier[data_format] == literal[string] keyword[else] literal[int] ,
identifier[momentum] = identifier[momentum] , identifier[epsilon] = identifier[epsilon] ,
identifier[center] = identifier[center] , identifier[scale] = identifier[scale] ,
identifier[renorm] = keyword[True] ,
identifier[renorm_clipping] ={
literal[string] : literal[int] / identifier[rmax] ,
literal[string] : identifier[rmax] ,
literal[string] : identifier[dmax] },
identifier[renorm_momentum] = literal[int] ,
identifier[gamma_initializer] = identifier[gamma_initializer] ,
identifier[fused] = keyword[False] ,
identifier[_reuse] = identifier[tf] . identifier[get_variable_scope] (). identifier[reuse] )
identifier[xn] = identifier[layer] . identifier[apply] ( identifier[x] , identifier[training] = identifier[ctx] . identifier[is_training] , identifier[scope] = identifier[tf] . identifier[get_variable_scope] ())
keyword[if] identifier[ctx] . identifier[is_main_training_tower] :
keyword[for] identifier[v] keyword[in] identifier[layer] . identifier[non_trainable_variables] :
keyword[if] identifier[isinstance] ( identifier[v] , identifier[tf] . identifier[Variable] ):
identifier[tf] . identifier[add_to_collection] ( identifier[tf] . identifier[GraphKeys] . identifier[MODEL_VARIABLES] , identifier[v] )
keyword[else] :
identifier[restore_collection] ( identifier[coll_bk] )
keyword[if] identifier[ndims] == literal[int] :
identifier[xn] = identifier[tf] . identifier[squeeze] ( identifier[xn] ,[ literal[int] , literal[int] ])
identifier[ret] = identifier[tf] . identifier[identity] ( identifier[xn] , identifier[name] = literal[string] )
identifier[vh] = identifier[ret] . identifier[variables] = identifier[VariableHolder] ()
keyword[if] identifier[scale] :
identifier[vh] . identifier[gamma] = identifier[layer] . identifier[gamma]
keyword[if] identifier[center] :
identifier[vh] . identifier[beta] = identifier[layer] . identifier[beta]
keyword[return] identifier[ret] | def BatchRenorm(x, rmax, dmax, momentum=0.9, epsilon=1e-05, center=True, scale=True, gamma_initializer=None, data_format='channels_last'):
"""
Batch Renormalization layer, as described in the paper:
`Batch Renormalization: Towards Reducing Minibatch Dependence in Batch-Normalized Models
<https://arxiv.org/abs/1702.03275>`_.
This implementation is a wrapper around `tf.layers.batch_normalization`.
Args:
x (tf.Tensor): a NHWC or NC tensor.
rmax, dmax (tf.Tensor): a scalar tensor, the maximum allowed corrections.
decay (float): decay rate of moving average.
epsilon (float): epsilon to avoid divide-by-zero.
use_scale, use_bias (bool): whether to use the extra affine transformation or not.
Returns:
tf.Tensor: a tensor named ``output`` with the same shape of x.
Variable Names:
* ``beta``: the bias term.
* ``gamma``: the scale term. Input will be transformed by ``x * gamma + beta``.
* ``moving_mean, renorm_mean, renorm_mean_weight``: See TF documentation.
* ``moving_variance, renorm_stddev, renorm_stddev_weight``: See TF documentation.
"""
shape = x.get_shape().as_list()
ndims = len(shape)
assert ndims in [2, 4]
if ndims == 2:
data_format = 'channels_first' # depends on [control=['if'], data=[]]
ctx = get_current_tower_context()
coll_bk = backup_collection([tf.GraphKeys.UPDATE_OPS])
layer = tf.layers.BatchNormalization(axis=1 if data_format == 'channels_first' else 3, momentum=momentum, epsilon=epsilon, center=center, scale=scale, renorm=True, renorm_clipping={'rmin': 1.0 / rmax, 'rmax': rmax, 'dmax': dmax}, renorm_momentum=0.99, gamma_initializer=gamma_initializer, fused=False, _reuse=tf.get_variable_scope().reuse)
xn = layer.apply(x, training=ctx.is_training, scope=tf.get_variable_scope())
if ctx.is_main_training_tower:
for v in layer.non_trainable_variables:
if isinstance(v, tf.Variable):
tf.add_to_collection(tf.GraphKeys.MODEL_VARIABLES, v) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['v']] # depends on [control=['if'], data=[]]
else:
# only run UPDATE_OPS in the first tower
restore_collection(coll_bk)
if ndims == 2:
xn = tf.squeeze(xn, [1, 2]) # depends on [control=['if'], data=[]]
ret = tf.identity(xn, name='output')
# TODO not sure whether to add moving_mean/moving_var to VH now
vh = ret.variables = VariableHolder()
if scale:
vh.gamma = layer.gamma # depends on [control=['if'], data=[]]
if center:
vh.beta = layer.beta # depends on [control=['if'], data=[]]
return ret |
def get_remote_port(self, tlv_data):
"""Returns Remote Port from the TLV. """
ret, parsed_val = self._check_common_tlv_format(
tlv_data, "\n", "Port Description TLV")
if not ret:
return None
return parsed_val[1].strip() | def function[get_remote_port, parameter[self, tlv_data]]:
constant[Returns Remote Port from the TLV. ]
<ast.Tuple object at 0x7da1b1be4520> assign[=] call[name[self]._check_common_tlv_format, parameter[name[tlv_data], constant[
], constant[Port Description TLV]]]
if <ast.UnaryOp object at 0x7da1b1be6f80> begin[:]
return[constant[None]]
return[call[call[name[parsed_val]][constant[1]].strip, parameter[]]] | keyword[def] identifier[get_remote_port] ( identifier[self] , identifier[tlv_data] ):
literal[string]
identifier[ret] , identifier[parsed_val] = identifier[self] . identifier[_check_common_tlv_format] (
identifier[tlv_data] , literal[string] , literal[string] )
keyword[if] keyword[not] identifier[ret] :
keyword[return] keyword[None]
keyword[return] identifier[parsed_val] [ literal[int] ]. identifier[strip] () | def get_remote_port(self, tlv_data):
"""Returns Remote Port from the TLV. """
(ret, parsed_val) = self._check_common_tlv_format(tlv_data, '\n', 'Port Description TLV')
if not ret:
return None # depends on [control=['if'], data=[]]
return parsed_val[1].strip() |
def execute(self):
"""
Execute one cpu instruction in the current thread (only one supported).
:rtype: bool
:return: C{True}
:todo: This is where we could implement a simple schedule.
"""
try:
self.current.execute()
self.clocks += 1
if self.clocks % 10000 == 0:
self.check_timers()
self.sched()
except Interruption as e:
if e.N != 0x80:
raise
try:
self.int80(self.current)
except RestartSyscall:
pass
return True | def function[execute, parameter[self]]:
constant[
Execute one cpu instruction in the current thread (only one supported).
:rtype: bool
:return: C{True}
:todo: This is where we could implement a simple schedule.
]
<ast.Try object at 0x7da204620670>
return[constant[True]] | keyword[def] identifier[execute] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[self] . identifier[current] . identifier[execute] ()
identifier[self] . identifier[clocks] += literal[int]
keyword[if] identifier[self] . identifier[clocks] % literal[int] == literal[int] :
identifier[self] . identifier[check_timers] ()
identifier[self] . identifier[sched] ()
keyword[except] identifier[Interruption] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[N] != literal[int] :
keyword[raise]
keyword[try] :
identifier[self] . identifier[int80] ( identifier[self] . identifier[current] )
keyword[except] identifier[RestartSyscall] :
keyword[pass]
keyword[return] keyword[True] | def execute(self):
"""
Execute one cpu instruction in the current thread (only one supported).
:rtype: bool
:return: C{True}
:todo: This is where we could implement a simple schedule.
"""
try:
self.current.execute()
self.clocks += 1
if self.clocks % 10000 == 0:
self.check_timers()
self.sched() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Interruption as e:
if e.N != 128:
raise # depends on [control=['if'], data=[]]
try:
self.int80(self.current) # depends on [control=['try'], data=[]]
except RestartSyscall:
pass # depends on [control=['except'], data=[]] # depends on [control=['except'], data=['e']]
return True |
def _create_layers(self, n_classes):
"""Create the layers of the model from self.layers.
:param n_classes: number of classes
:return: self
"""
next_layer_feed = tf.reshape(self.input_data,
[-1, self.original_shape[0],
self.original_shape[1],
self.original_shape[2]])
prev_output_dim = self.original_shape[2]
# this flags indicates whether we are building the first dense layer
first_full = True
self.W_vars = []
self.B_vars = []
for i, l in enumerate(self.layers.split(',')):
node = l.split('-')
node_type = node[0]
if node_type == 'conv2d':
# ################### #
# Convolutional Layer #
# ################### #
# fx, fy = shape of the convolutional filter
# feature_maps = number of output dimensions
fx, fy, feature_maps, stride = int(node[1]),\
int(node[2]), int(node[3]), int(node[4])
print('Building Convolutional layer with %d input channels\
and %d %dx%d filters with stride %d' %
(prev_output_dim, feature_maps, fx, fy, stride))
# Create weights and biases
W_conv = self.weight_variable(
[fx, fy, prev_output_dim, feature_maps])
b_conv = self.bias_variable([feature_maps])
self.W_vars.append(W_conv)
self.B_vars.append(b_conv)
# Convolution and Activation function
h_conv = tf.nn.relu(
self.conv2d(next_layer_feed, W_conv, stride) + b_conv)
# keep track of the number of output dims of the previous layer
prev_output_dim = feature_maps
# output node of the last layer
next_layer_feed = h_conv
elif node_type == 'maxpool':
# ################# #
# Max Pooling Layer #
# ################# #
ksize = int(node[1])
print('Building Max Pooling layer with size %d' % ksize)
next_layer_feed = self.max_pool(next_layer_feed, ksize)
elif node_type == 'full':
# ####################### #
# Densely Connected Layer #
# ####################### #
if first_full: # first fully connected layer
dim = int(node[1])
shp = next_layer_feed.get_shape()
tmpx = shp[1].value
tmpy = shp[2].value
fanin = tmpx * tmpy * prev_output_dim
print('Building fully connected layer with %d in units\
and %d out units' % (fanin, dim))
W_fc = self.weight_variable([fanin, dim])
b_fc = self.bias_variable([dim])
self.W_vars.append(W_fc)
self.B_vars.append(b_fc)
h_pool_flat = tf.reshape(next_layer_feed, [-1, fanin])
h_fc = tf.nn.relu(tf.add(
tf.matmul(h_pool_flat, W_fc),
b_fc))
h_fc_drop = tf.nn.dropout(h_fc, self.keep_prob)
prev_output_dim = dim
next_layer_feed = h_fc_drop
first_full = False
else: # not first fully connected layer
dim = int(node[1])
W_fc = self.weight_variable([prev_output_dim, dim])
b_fc = self.bias_variable([dim])
self.W_vars.append(W_fc)
self.B_vars.append(b_fc)
h_fc = tf.nn.relu(tf.add(
tf.matmul(next_layer_feed, W_fc), b_fc))
h_fc_drop = tf.nn.dropout(h_fc, self.keep_prob)
prev_output_dim = dim
next_layer_feed = h_fc_drop
elif node_type == 'softmax':
# ############# #
# Softmax Layer #
# ############# #
print('Building softmax layer with %d in units and\
%d out units' % (prev_output_dim, n_classes))
W_sm = self.weight_variable([prev_output_dim, n_classes])
b_sm = self.bias_variable([n_classes])
self.W_vars.append(W_sm)
self.B_vars.append(b_sm)
self.mod_y = tf.add(tf.matmul(next_layer_feed, W_sm), b_sm) | def function[_create_layers, parameter[self, n_classes]]:
constant[Create the layers of the model from self.layers.
:param n_classes: number of classes
:return: self
]
variable[next_layer_feed] assign[=] call[name[tf].reshape, parameter[name[self].input_data, list[[<ast.UnaryOp object at 0x7da18f09fa30>, <ast.Subscript object at 0x7da18f09ea40>, <ast.Subscript object at 0x7da18f09d1b0>, <ast.Subscript object at 0x7da18f09c220>]]]]
variable[prev_output_dim] assign[=] call[name[self].original_shape][constant[2]]
variable[first_full] assign[=] constant[True]
name[self].W_vars assign[=] list[[]]
name[self].B_vars assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18f09f670>, <ast.Name object at 0x7da18f09c5b0>]]] in starred[call[name[enumerate], parameter[call[name[self].layers.split, parameter[constant[,]]]]]] begin[:]
variable[node] assign[=] call[name[l].split, parameter[constant[-]]]
variable[node_type] assign[=] call[name[node]][constant[0]]
if compare[name[node_type] equal[==] constant[conv2d]] begin[:]
<ast.Tuple object at 0x7da18f09ded0> assign[=] tuple[[<ast.Call object at 0x7da18f09dd20>, <ast.Call object at 0x7da18f09e1d0>, <ast.Call object at 0x7da18f09de40>, <ast.Call object at 0x7da18f09e0e0>]]
call[name[print], parameter[binary_operation[constant[Building Convolutional layer with %d input channels and %d %dx%d filters with stride %d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f09d270>, <ast.Name object at 0x7da18f09dff0>, <ast.Name object at 0x7da18f09ead0>, <ast.Name object at 0x7da18f09d6c0>, <ast.Name object at 0x7da18f09f5b0>]]]]]
variable[W_conv] assign[=] call[name[self].weight_variable, parameter[list[[<ast.Name object at 0x7da18f09c430>, <ast.Name object at 0x7da18f09e6e0>, <ast.Name object at 0x7da18f09de70>, <ast.Name object at 0x7da18f09c520>]]]]
variable[b_conv] assign[=] call[name[self].bias_variable, parameter[list[[<ast.Name object at 0x7da18f09f640>]]]]
call[name[self].W_vars.append, parameter[name[W_conv]]]
call[name[self].B_vars.append, parameter[name[b_conv]]]
variable[h_conv] assign[=] call[name[tf].nn.relu, parameter[binary_operation[call[name[self].conv2d, parameter[name[next_layer_feed], name[W_conv], name[stride]]] + name[b_conv]]]]
variable[prev_output_dim] assign[=] name[feature_maps]
variable[next_layer_feed] assign[=] name[h_conv] | keyword[def] identifier[_create_layers] ( identifier[self] , identifier[n_classes] ):
literal[string]
identifier[next_layer_feed] = identifier[tf] . identifier[reshape] ( identifier[self] . identifier[input_data] ,
[- literal[int] , identifier[self] . identifier[original_shape] [ literal[int] ],
identifier[self] . identifier[original_shape] [ literal[int] ],
identifier[self] . identifier[original_shape] [ literal[int] ]])
identifier[prev_output_dim] = identifier[self] . identifier[original_shape] [ literal[int] ]
identifier[first_full] = keyword[True]
identifier[self] . identifier[W_vars] =[]
identifier[self] . identifier[B_vars] =[]
keyword[for] identifier[i] , identifier[l] keyword[in] identifier[enumerate] ( identifier[self] . identifier[layers] . identifier[split] ( literal[string] )):
identifier[node] = identifier[l] . identifier[split] ( literal[string] )
identifier[node_type] = identifier[node] [ literal[int] ]
keyword[if] identifier[node_type] == literal[string] :
identifier[fx] , identifier[fy] , identifier[feature_maps] , identifier[stride] = identifier[int] ( identifier[node] [ literal[int] ]), identifier[int] ( identifier[node] [ literal[int] ]), identifier[int] ( identifier[node] [ literal[int] ]), identifier[int] ( identifier[node] [ literal[int] ])
identifier[print] ( literal[string] %
( identifier[prev_output_dim] , identifier[feature_maps] , identifier[fx] , identifier[fy] , identifier[stride] ))
identifier[W_conv] = identifier[self] . identifier[weight_variable] (
[ identifier[fx] , identifier[fy] , identifier[prev_output_dim] , identifier[feature_maps] ])
identifier[b_conv] = identifier[self] . identifier[bias_variable] ([ identifier[feature_maps] ])
identifier[self] . identifier[W_vars] . identifier[append] ( identifier[W_conv] )
identifier[self] . identifier[B_vars] . identifier[append] ( identifier[b_conv] )
identifier[h_conv] = identifier[tf] . identifier[nn] . identifier[relu] (
identifier[self] . identifier[conv2d] ( identifier[next_layer_feed] , identifier[W_conv] , identifier[stride] )+ identifier[b_conv] )
identifier[prev_output_dim] = identifier[feature_maps]
identifier[next_layer_feed] = identifier[h_conv]
keyword[elif] identifier[node_type] == literal[string] :
identifier[ksize] = identifier[int] ( identifier[node] [ literal[int] ])
identifier[print] ( literal[string] % identifier[ksize] )
identifier[next_layer_feed] = identifier[self] . identifier[max_pool] ( identifier[next_layer_feed] , identifier[ksize] )
keyword[elif] identifier[node_type] == literal[string] :
keyword[if] identifier[first_full] :
identifier[dim] = identifier[int] ( identifier[node] [ literal[int] ])
identifier[shp] = identifier[next_layer_feed] . identifier[get_shape] ()
identifier[tmpx] = identifier[shp] [ literal[int] ]. identifier[value]
identifier[tmpy] = identifier[shp] [ literal[int] ]. identifier[value]
identifier[fanin] = identifier[tmpx] * identifier[tmpy] * identifier[prev_output_dim]
identifier[print] ( literal[string] %( identifier[fanin] , identifier[dim] ))
identifier[W_fc] = identifier[self] . identifier[weight_variable] ([ identifier[fanin] , identifier[dim] ])
identifier[b_fc] = identifier[self] . identifier[bias_variable] ([ identifier[dim] ])
identifier[self] . identifier[W_vars] . identifier[append] ( identifier[W_fc] )
identifier[self] . identifier[B_vars] . identifier[append] ( identifier[b_fc] )
identifier[h_pool_flat] = identifier[tf] . identifier[reshape] ( identifier[next_layer_feed] ,[- literal[int] , identifier[fanin] ])
identifier[h_fc] = identifier[tf] . identifier[nn] . identifier[relu] ( identifier[tf] . identifier[add] (
identifier[tf] . identifier[matmul] ( identifier[h_pool_flat] , identifier[W_fc] ),
identifier[b_fc] ))
identifier[h_fc_drop] = identifier[tf] . identifier[nn] . identifier[dropout] ( identifier[h_fc] , identifier[self] . identifier[keep_prob] )
identifier[prev_output_dim] = identifier[dim]
identifier[next_layer_feed] = identifier[h_fc_drop]
identifier[first_full] = keyword[False]
keyword[else] :
identifier[dim] = identifier[int] ( identifier[node] [ literal[int] ])
identifier[W_fc] = identifier[self] . identifier[weight_variable] ([ identifier[prev_output_dim] , identifier[dim] ])
identifier[b_fc] = identifier[self] . identifier[bias_variable] ([ identifier[dim] ])
identifier[self] . identifier[W_vars] . identifier[append] ( identifier[W_fc] )
identifier[self] . identifier[B_vars] . identifier[append] ( identifier[b_fc] )
identifier[h_fc] = identifier[tf] . identifier[nn] . identifier[relu] ( identifier[tf] . identifier[add] (
identifier[tf] . identifier[matmul] ( identifier[next_layer_feed] , identifier[W_fc] ), identifier[b_fc] ))
identifier[h_fc_drop] = identifier[tf] . identifier[nn] . identifier[dropout] ( identifier[h_fc] , identifier[self] . identifier[keep_prob] )
identifier[prev_output_dim] = identifier[dim]
identifier[next_layer_feed] = identifier[h_fc_drop]
keyword[elif] identifier[node_type] == literal[string] :
identifier[print] ( literal[string] %( identifier[prev_output_dim] , identifier[n_classes] ))
identifier[W_sm] = identifier[self] . identifier[weight_variable] ([ identifier[prev_output_dim] , identifier[n_classes] ])
identifier[b_sm] = identifier[self] . identifier[bias_variable] ([ identifier[n_classes] ])
identifier[self] . identifier[W_vars] . identifier[append] ( identifier[W_sm] )
identifier[self] . identifier[B_vars] . identifier[append] ( identifier[b_sm] )
identifier[self] . identifier[mod_y] = identifier[tf] . identifier[add] ( identifier[tf] . identifier[matmul] ( identifier[next_layer_feed] , identifier[W_sm] ), identifier[b_sm] ) | def _create_layers(self, n_classes):
"""Create the layers of the model from self.layers.
:param n_classes: number of classes
:return: self
"""
next_layer_feed = tf.reshape(self.input_data, [-1, self.original_shape[0], self.original_shape[1], self.original_shape[2]])
prev_output_dim = self.original_shape[2]
# this flags indicates whether we are building the first dense layer
first_full = True
self.W_vars = []
self.B_vars = []
for (i, l) in enumerate(self.layers.split(',')):
node = l.split('-')
node_type = node[0]
if node_type == 'conv2d':
# ################### #
# Convolutional Layer #
# ################### #
# fx, fy = shape of the convolutional filter
# feature_maps = number of output dimensions
(fx, fy, feature_maps, stride) = (int(node[1]), int(node[2]), int(node[3]), int(node[4]))
print('Building Convolutional layer with %d input channels and %d %dx%d filters with stride %d' % (prev_output_dim, feature_maps, fx, fy, stride))
# Create weights and biases
W_conv = self.weight_variable([fx, fy, prev_output_dim, feature_maps])
b_conv = self.bias_variable([feature_maps])
self.W_vars.append(W_conv)
self.B_vars.append(b_conv)
# Convolution and Activation function
h_conv = tf.nn.relu(self.conv2d(next_layer_feed, W_conv, stride) + b_conv)
# keep track of the number of output dims of the previous layer
prev_output_dim = feature_maps
# output node of the last layer
next_layer_feed = h_conv # depends on [control=['if'], data=[]]
elif node_type == 'maxpool':
# ################# #
# Max Pooling Layer #
# ################# #
ksize = int(node[1])
print('Building Max Pooling layer with size %d' % ksize)
next_layer_feed = self.max_pool(next_layer_feed, ksize) # depends on [control=['if'], data=[]]
elif node_type == 'full':
# ####################### #
# Densely Connected Layer #
# ####################### #
if first_full: # first fully connected layer
dim = int(node[1])
shp = next_layer_feed.get_shape()
tmpx = shp[1].value
tmpy = shp[2].value
fanin = tmpx * tmpy * prev_output_dim
print('Building fully connected layer with %d in units and %d out units' % (fanin, dim))
W_fc = self.weight_variable([fanin, dim])
b_fc = self.bias_variable([dim])
self.W_vars.append(W_fc)
self.B_vars.append(b_fc)
h_pool_flat = tf.reshape(next_layer_feed, [-1, fanin])
h_fc = tf.nn.relu(tf.add(tf.matmul(h_pool_flat, W_fc), b_fc))
h_fc_drop = tf.nn.dropout(h_fc, self.keep_prob)
prev_output_dim = dim
next_layer_feed = h_fc_drop
first_full = False # depends on [control=['if'], data=[]]
else: # not first fully connected layer
dim = int(node[1])
W_fc = self.weight_variable([prev_output_dim, dim])
b_fc = self.bias_variable([dim])
self.W_vars.append(W_fc)
self.B_vars.append(b_fc)
h_fc = tf.nn.relu(tf.add(tf.matmul(next_layer_feed, W_fc), b_fc))
h_fc_drop = tf.nn.dropout(h_fc, self.keep_prob)
prev_output_dim = dim
next_layer_feed = h_fc_drop # depends on [control=['if'], data=[]]
elif node_type == 'softmax':
# ############# #
# Softmax Layer #
# ############# #
print('Building softmax layer with %d in units and %d out units' % (prev_output_dim, n_classes))
W_sm = self.weight_variable([prev_output_dim, n_classes])
b_sm = self.bias_variable([n_classes])
self.W_vars.append(W_sm)
self.B_vars.append(b_sm)
self.mod_y = tf.add(tf.matmul(next_layer_feed, W_sm), b_sm) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def _get_list(self, value, context=None):
"""
Get a configuration value. The result is None if "value" is None,
otherwise the result is a list.
"value" may be a list, dict, or str value.
If a list, each element of the list may be a list, dict, or
str value, and the value extraction proceeds recursively.
During processing, if a dict is encountered, each element of
the dict is checked for existence in the context. If it
exists the associated value will be processed recursively as
before.
The final result will be the flattened list resulting from the
recursion. Even if the initial "value" is a str, the result
will be a list, with one element.
"""
log = self._params.get('log', self._discard)
res = []
if value is None:
return res
if context is None:
context = self._context
if isinstance(value, list):
log.debug("Processing list %s", value)
for v in value:
res.extend(self._get_list(v, context=context))
elif isinstance(value, dict):
log.debug("Processing dict %s", value)
for k in value:
if k in context:
res.extend(self._get_list(value[k], context=context))
else:
log.debug("Processing value '%s'", value)
res.append(value)
return res | def function[_get_list, parameter[self, value, context]]:
constant[
Get a configuration value. The result is None if "value" is None,
otherwise the result is a list.
"value" may be a list, dict, or str value.
If a list, each element of the list may be a list, dict, or
str value, and the value extraction proceeds recursively.
During processing, if a dict is encountered, each element of
the dict is checked for existence in the context. If it
exists the associated value will be processed recursively as
before.
The final result will be the flattened list resulting from the
recursion. Even if the initial "value" is a str, the result
will be a list, with one element.
]
variable[log] assign[=] call[name[self]._params.get, parameter[constant[log], name[self]._discard]]
variable[res] assign[=] list[[]]
if compare[name[value] is constant[None]] begin[:]
return[name[res]]
if compare[name[context] is constant[None]] begin[:]
variable[context] assign[=] name[self]._context
if call[name[isinstance], parameter[name[value], name[list]]] begin[:]
call[name[log].debug, parameter[constant[Processing list %s], name[value]]]
for taget[name[v]] in starred[name[value]] begin[:]
call[name[res].extend, parameter[call[name[self]._get_list, parameter[name[v]]]]]
return[name[res]] | keyword[def] identifier[_get_list] ( identifier[self] , identifier[value] , identifier[context] = keyword[None] ):
literal[string]
identifier[log] = identifier[self] . identifier[_params] . identifier[get] ( literal[string] , identifier[self] . identifier[_discard] )
identifier[res] =[]
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return] identifier[res]
keyword[if] identifier[context] keyword[is] keyword[None] :
identifier[context] = identifier[self] . identifier[_context]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[list] ):
identifier[log] . identifier[debug] ( literal[string] , identifier[value] )
keyword[for] identifier[v] keyword[in] identifier[value] :
identifier[res] . identifier[extend] ( identifier[self] . identifier[_get_list] ( identifier[v] , identifier[context] = identifier[context] ))
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[dict] ):
identifier[log] . identifier[debug] ( literal[string] , identifier[value] )
keyword[for] identifier[k] keyword[in] identifier[value] :
keyword[if] identifier[k] keyword[in] identifier[context] :
identifier[res] . identifier[extend] ( identifier[self] . identifier[_get_list] ( identifier[value] [ identifier[k] ], identifier[context] = identifier[context] ))
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] , identifier[value] )
identifier[res] . identifier[append] ( identifier[value] )
keyword[return] identifier[res] | def _get_list(self, value, context=None):
"""
Get a configuration value. The result is None if "value" is None,
otherwise the result is a list.
"value" may be a list, dict, or str value.
If a list, each element of the list may be a list, dict, or
str value, and the value extraction proceeds recursively.
During processing, if a dict is encountered, each element of
the dict is checked for existence in the context. If it
exists the associated value will be processed recursively as
before.
The final result will be the flattened list resulting from the
recursion. Even if the initial "value" is a str, the result
will be a list, with one element.
"""
log = self._params.get('log', self._discard)
res = []
if value is None:
return res # depends on [control=['if'], data=[]]
if context is None:
context = self._context # depends on [control=['if'], data=['context']]
if isinstance(value, list):
log.debug('Processing list %s', value)
for v in value:
res.extend(self._get_list(v, context=context)) # depends on [control=['for'], data=['v']] # depends on [control=['if'], data=[]]
elif isinstance(value, dict):
log.debug('Processing dict %s', value)
for k in value:
if k in context:
res.extend(self._get_list(value[k], context=context)) # depends on [control=['if'], data=['k', 'context']] # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]]
else:
log.debug("Processing value '%s'", value)
res.append(value)
return res |
def _block_collection(iterator, dtype, bsize=-1):
    """Pack rdd with a specific collection constructor.

    Accumulates items from ``iterator`` and yields them packed via
    ``_pack_accumulated`` in chunks of at most ``bsize`` items; a
    non-positive ``bsize`` means everything goes into a single chunk.
    """
    pending = []
    for item in iterator:
        # Flush the buffer once it reaches the block size (if one is set).
        if 0 < bsize <= len(pending):
            yield _pack_accumulated(pending, dtype)
            pending = []
        pending.append(item)
    # Emit whatever is left over (also covers the "no block size" case).
    if pending:
        yield _pack_accumulated(pending, dtype)
constant[Pack rdd with a specific collection constructor.]
variable[i] assign[=] constant[0]
variable[accumulated] assign[=] list[[]]
for taget[name[a]] in starred[name[iterator]] begin[:]
if <ast.BoolOp object at 0x7da18f723340> begin[:]
<ast.Yield object at 0x7da1b0339ff0>
variable[accumulated] assign[=] list[[]]
variable[i] assign[=] constant[0]
call[name[accumulated].append, parameter[name[a]]]
<ast.AugAssign object at 0x7da1b033b2e0>
if compare[name[i] greater[>] constant[0]] begin[:]
<ast.Yield object at 0x7da1b0338fd0> | keyword[def] identifier[_block_collection] ( identifier[iterator] , identifier[dtype] , identifier[bsize] =- literal[int] ):
literal[string]
identifier[i] = literal[int]
identifier[accumulated] =[]
keyword[for] identifier[a] keyword[in] identifier[iterator] :
keyword[if] ( identifier[bsize] > literal[int] ) keyword[and] ( identifier[i] >= identifier[bsize] ):
keyword[yield] identifier[_pack_accumulated] ( identifier[accumulated] , identifier[dtype] )
identifier[accumulated] =[]
identifier[i] = literal[int]
identifier[accumulated] . identifier[append] ( identifier[a] )
identifier[i] += literal[int]
keyword[if] identifier[i] > literal[int] :
keyword[yield] identifier[_pack_accumulated] ( identifier[accumulated] , identifier[dtype] ) | def _block_collection(iterator, dtype, bsize=-1):
"""Pack rdd with a specific collection constructor."""
i = 0
accumulated = []
for a in iterator:
if bsize > 0 and i >= bsize:
yield _pack_accumulated(accumulated, dtype)
accumulated = []
i = 0 # depends on [control=['if'], data=[]]
accumulated.append(a)
i += 1 # depends on [control=['for'], data=['a']]
if i > 0:
yield _pack_accumulated(accumulated, dtype) # depends on [control=['if'], data=[]] |
def unregister(self, type_, handler):
    """Remove ``handler`` from the listeners registered for event ``type_``.

    The request is silently ignored when the event type has no
    registered handlers or when ``handler`` is not among them.  When
    the last handler of an event type is removed, the event type entry
    itself is dropped from the engine.
    """
    # Look up the handler list without indexing: the original code did
    # ``self.__handlers[type_]`` which, despite the comment promising to
    # ignore unknown types, raised KeyError on a plain dict (or created
    # a spurious empty entry on a defaultdict).
    handler_list = self.__handlers.get(type_)
    if handler_list is None:
        return
    # Remove the handler if it is actually registered.
    if handler in handler_list:
        handler_list.remove(handler)
    # Drop the event type entirely once its handler list is empty.
    if not handler_list:
        del self.__handlers[type_]
constant[注销事件处理函数监听]
variable[handlerList] assign[=] call[name[self].__handlers][name[type_]]
if compare[name[handler] in name[handlerList]] begin[:]
call[name[handlerList].remove, parameter[name[handler]]]
if <ast.UnaryOp object at 0x7da20c992980> begin[:]
<ast.Delete object at 0x7da20c991a20> | keyword[def] identifier[unregister] ( identifier[self] , identifier[type_] , identifier[handler] ):
literal[string]
identifier[handlerList] = identifier[self] . identifier[__handlers] [ identifier[type_] ]
keyword[if] identifier[handler] keyword[in] identifier[handlerList] :
identifier[handlerList] . identifier[remove] ( identifier[handler] )
keyword[if] keyword[not] identifier[handlerList] :
keyword[del] identifier[self] . identifier[__handlers] [ identifier[type_] ] | def unregister(self, type_, handler):
"""注销事件处理函数监听""" # 尝试获取该事件类型对应的处理函数列表,若无则忽略该次注销请求
handlerList = self.__handlers[type_]
# 如果该函数存在于列表中,则移除
if handler in handlerList:
handlerList.remove(handler) # depends on [control=['if'], data=['handler', 'handlerList']]
# 如果函数列表为空,则从引擎中移除该事件类型
if not handlerList:
del self.__handlers[type_] # depends on [control=['if'], data=[]] |
def setup_endpoints(provider):
    """Setup the OpenID Connect Provider endpoints.

    Each provider endpoint callable is wrapped in ``pyoidcMiddleware``
    and mapped under the URL path "/<endpoint type>".
    """
    endpoints = (
        AuthorizationEndpoint(pyoidcMiddleware(provider.authorization_endpoint)),
        TokenEndpoint(pyoidcMiddleware(provider.token_endpoint)),
        UserinfoEndpoint(pyoidcMiddleware(provider.userinfo_endpoint)),
        RegistrationEndpoint(pyoidcMiddleware(provider.registration_endpoint)),
        EndSessionEndpoint(pyoidcMiddleware(provider.endsession_endpoint)),
    )
    return {"/{}".format(endpoint.etype): endpoint for endpoint in endpoints}
constant[Setup the OpenID Connect Provider endpoints.]
variable[app_routing] assign[=] dictionary[[], []]
variable[endpoints] assign[=] list[[<ast.Call object at 0x7da204566b90>, <ast.Call object at 0x7da204564d30>, <ast.Call object at 0x7da204566aa0>, <ast.Call object at 0x7da204567ac0>, <ast.Call object at 0x7da204567160>]]
for taget[name[ep]] in starred[name[endpoints]] begin[:]
call[name[app_routing]][call[constant[/{}].format, parameter[name[ep].etype]]] assign[=] name[ep]
return[name[app_routing]] | keyword[def] identifier[setup_endpoints] ( identifier[provider] ):
literal[string]
identifier[app_routing] ={}
identifier[endpoints] =[
identifier[AuthorizationEndpoint] (
identifier[pyoidcMiddleware] ( identifier[provider] . identifier[authorization_endpoint] )),
identifier[TokenEndpoint] (
identifier[pyoidcMiddleware] ( identifier[provider] . identifier[token_endpoint] )),
identifier[UserinfoEndpoint] (
identifier[pyoidcMiddleware] ( identifier[provider] . identifier[userinfo_endpoint] )),
identifier[RegistrationEndpoint] (
identifier[pyoidcMiddleware] ( identifier[provider] . identifier[registration_endpoint] )),
identifier[EndSessionEndpoint] (
identifier[pyoidcMiddleware] ( identifier[provider] . identifier[endsession_endpoint] ))
]
keyword[for] identifier[ep] keyword[in] identifier[endpoints] :
identifier[app_routing] [ literal[string] . identifier[format] ( identifier[ep] . identifier[etype] )]= identifier[ep]
keyword[return] identifier[app_routing] | def setup_endpoints(provider):
"""Setup the OpenID Connect Provider endpoints."""
app_routing = {}
endpoints = [AuthorizationEndpoint(pyoidcMiddleware(provider.authorization_endpoint)), TokenEndpoint(pyoidcMiddleware(provider.token_endpoint)), UserinfoEndpoint(pyoidcMiddleware(provider.userinfo_endpoint)), RegistrationEndpoint(pyoidcMiddleware(provider.registration_endpoint)), EndSessionEndpoint(pyoidcMiddleware(provider.endsession_endpoint))]
for ep in endpoints:
app_routing['/{}'.format(ep.etype)] = ep # depends on [control=['for'], data=['ep']]
return app_routing |
def delete(self, where=None, start=None, stop=None, **kwargs):
    """
    support fully deleting the node in its entirety (only) - where
    specification must be None
    """
    # Partial deletes (any where/start/stop given) are unsupported on
    # an abstract storer, so reject them up front.
    if not com._all_none(where, start, stop):
        raise TypeError("cannot delete on an abstract storer")
    self._handle.remove_node(self.group, recursive=True)
    return None
constant[
support fully deleting the node in its entirety (only) - where
specification must be None
]
if call[name[com]._all_none, parameter[name[where], name[start], name[stop]]] begin[:]
call[name[self]._handle.remove_node, parameter[name[self].group]]
return[constant[None]]
<ast.Raise object at 0x7da18bcca620> | keyword[def] identifier[delete] ( identifier[self] , identifier[where] = keyword[None] , identifier[start] = keyword[None] , identifier[stop] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[com] . identifier[_all_none] ( identifier[where] , identifier[start] , identifier[stop] ):
identifier[self] . identifier[_handle] . identifier[remove_node] ( identifier[self] . identifier[group] , identifier[recursive] = keyword[True] )
keyword[return] keyword[None]
keyword[raise] identifier[TypeError] ( literal[string] ) | def delete(self, where=None, start=None, stop=None, **kwargs):
"""
support fully deleting the node in its entirety (only) - where
specification must be None
"""
if com._all_none(where, start, stop):
self._handle.remove_node(self.group, recursive=True)
return None # depends on [control=['if'], data=[]]
raise TypeError('cannot delete on an abstract storer') |
def _validate_response(self, response, message, exclude_code=None): # pylint: disable=no-self-use
"""
validate an api server response
:param dict response: server response to check
:param str message: error message to raise
:param int exclude_code: error codes to exclude from errorhandling
:return:
":raises Exception: on error
"""
if 'code' in response and response['code'] >= 2000:
if exclude_code is not None and response['code'] == exclude_code:
return
raise Exception("{0}: {1} ({2})".format(
message, response['msg'], response['code'])) | def function[_validate_response, parameter[self, response, message, exclude_code]]:
constant[
validate an api server response
:param dict response: server response to check
:param str message: error message to raise
:param int exclude_code: error codes to exclude from errorhandling
:return:
":raises Exception: on error
]
if <ast.BoolOp object at 0x7da1b2389ba0> begin[:]
if <ast.BoolOp object at 0x7da1b23884f0> begin[:]
return[None]
<ast.Raise object at 0x7da1b2389fc0> | keyword[def] identifier[_validate_response] ( identifier[self] , identifier[response] , identifier[message] , identifier[exclude_code] = keyword[None] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[response] keyword[and] identifier[response] [ literal[string] ]>= literal[int] :
keyword[if] identifier[exclude_code] keyword[is] keyword[not] keyword[None] keyword[and] identifier[response] [ literal[string] ]== identifier[exclude_code] :
keyword[return]
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] (
identifier[message] , identifier[response] [ literal[string] ], identifier[response] [ literal[string] ])) | def _validate_response(self, response, message, exclude_code=None): # pylint: disable=no-self-use
'\n validate an api server response\n\n :param dict response: server response to check\n :param str message: error message to raise\n :param int exclude_code: error codes to exclude from errorhandling\n :return:\n ":raises Exception: on error\n '
if 'code' in response and response['code'] >= 2000:
if exclude_code is not None and response['code'] == exclude_code:
return # depends on [control=['if'], data=[]]
raise Exception('{0}: {1} ({2})'.format(message, response['msg'], response['code'])) # depends on [control=['if'], data=[]] |
def get_files_from_dir(path, recursive=True, depth=0, file_ext='.py'):
    """Retrieve the list of files from a folder.
    @param path: file or directory where to search files
    @param recursive: if True will search also sub-directories
    @param depth: if explore recursively, the depth of sub directories to follow
    @param file_ext: the files extension to get. Default is '.py'
    @return: the file list retrieved. if the input is a file then a one element list.
    """
    # A single file (or the stdin marker '-') is returned as-is.
    if os.path.isfile(path) or path == '-':
        return [path]
    file_list = []
    if not path.endswith(os.sep):
        path += os.sep
    for entry in glob.glob(path + "*"):
        if os.path.isdir(entry):
            # Fix: honour the ``recursive`` flag (it was previously passed
            # along but never checked, so recursion happened regardless),
            # cap the depth to avoid an unbounded descent, and propagate
            # ``file_ext`` so a custom extension filter also applies to
            # sub-directories (it previously reset to '.py' on recursion).
            if recursive and depth < MAX_DEPTH_RECUR:
                file_list.extend(
                    get_files_from_dir(entry, recursive, depth + 1, file_ext))
        elif entry.endswith(file_ext):
            file_list.append(entry)
    return file_list
constant[Retrieve the list of files from a folder.
@param path: file or directory where to search files
@param recursive: if True will search also sub-directories
@param depth: if explore recursively, the depth of sub directories to follow
@param file_ext: the files extension to get. Default is '.py'
@return: the file list retrieved. if the input is a file then a one element list.
]
variable[file_list] assign[=] list[[]]
if <ast.BoolOp object at 0x7da1b1195000> begin[:]
return[list[[<ast.Name object at 0x7da1b1196da0>]]]
if compare[call[name[path]][<ast.UnaryOp object at 0x7da1b1194190>] not_equal[!=] name[os].sep] begin[:]
variable[path] assign[=] binary_operation[name[path] + name[os].sep]
for taget[name[f]] in starred[call[name[glob].glob, parameter[binary_operation[name[path] + constant[*]]]]] begin[:]
if call[name[os].path.isdir, parameter[name[f]]] begin[:]
if compare[name[depth] less[<] name[MAX_DEPTH_RECUR]] begin[:]
call[name[file_list].extend, parameter[call[name[get_files_from_dir], parameter[name[f], name[recursive], binary_operation[name[depth] + constant[1]]]]]]
return[name[file_list]] | keyword[def] identifier[get_files_from_dir] ( identifier[path] , identifier[recursive] = keyword[True] , identifier[depth] = literal[int] , identifier[file_ext] = literal[string] ):
literal[string]
identifier[file_list] =[]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[path] ) keyword[or] identifier[path] == literal[string] :
keyword[return] [ identifier[path] ]
keyword[if] identifier[path] [- literal[int] ]!= identifier[os] . identifier[sep] :
identifier[path] = identifier[path] + identifier[os] . identifier[sep]
keyword[for] identifier[f] keyword[in] identifier[glob] . identifier[glob] ( identifier[path] + literal[string] ):
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[f] ):
keyword[if] identifier[depth] < identifier[MAX_DEPTH_RECUR] :
identifier[file_list] . identifier[extend] ( identifier[get_files_from_dir] ( identifier[f] , identifier[recursive] , identifier[depth] + literal[int] ))
keyword[else] :
keyword[continue]
keyword[elif] identifier[f] . identifier[endswith] ( identifier[file_ext] ):
identifier[file_list] . identifier[append] ( identifier[f] )
keyword[return] identifier[file_list] | def get_files_from_dir(path, recursive=True, depth=0, file_ext='.py'):
"""Retrieve the list of files from a folder.
@param path: file or directory where to search files
@param recursive: if True will search also sub-directories
@param depth: if explore recursively, the depth of sub directories to follow
@param file_ext: the files extension to get. Default is '.py'
@return: the file list retrieved. if the input is a file then a one element list.
"""
file_list = []
if os.path.isfile(path) or path == '-':
return [path] # depends on [control=['if'], data=[]]
if path[-1] != os.sep:
path = path + os.sep # depends on [control=['if'], data=[]]
for f in glob.glob(path + '*'):
if os.path.isdir(f):
if depth < MAX_DEPTH_RECUR: # avoid infinite recursive loop
file_list.extend(get_files_from_dir(f, recursive, depth + 1)) # depends on [control=['if'], data=['depth']]
else:
continue # depends on [control=['if'], data=[]]
elif f.endswith(file_ext):
file_list.append(f) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
return file_list |
def outliers_gesd(data, outliers=5, report=False, alpha=0.05):
    """
    The generalized (Extreme Studentized Deviate) ESD test is used
    to detect one or more outliers in a univariate data set that follows
    an approximately normal distribution [1]_.

    Parameters
    ----------
    data : array_like or ndarray, 1d
        An array, any object exposing the array interface, containing
        data to test for outliers.
    outliers : int, optional
        Number of potential outliers to test for. Test is two-tailed, i.e.
        maximum and minimum values are checked for potential outliers.
    report : bool, optional
        If True, print a summary table of the test and return None;
        otherwise (default) return the sorted array with detected
        outliers removed.
    alpha : float, optional
        Significance level for a hypothesis test. Default is 0.05.

    Returns
    -------
    Sorted numpy array with outliers removed when ``report`` is False,
    otherwise None (the summary table is printed instead).

    Notes
    -----
    .. [1] Rosner, Bernard (May 1983), Percentage Points for a Generalized
        ESD Many-Outlier Procedure, Technometrics, 25(2), pp. 165-172.
    """
    # Fix: ``dtype=np.float`` was removed from NumPy (1.24+); the builtin
    # ``float`` is the equivalent dtype.  Also dropped an unused ``mean``
    # local computed before the loop.
    Rs = np.zeros(outliers, dtype=float)
    ls = np.zeros(outliers, dtype=float)
    ms = []

    data = np.sort(np.array(data))
    data_proc = np.copy(data)
    n = data_proc.size
    # Fix: map positions in the shrinking working array back to positions
    # in the full sorted array.  The previous implementation stored raw
    # argmax indices into the shrunken array and later deleted them from
    # the full array, which removed the wrong elements whenever a
    # minimum-side outlier shifted the indices of later observations.
    remaining = np.arange(n)

    for i in range(outliers):
        abs_d = np.abs(data_proc - np.mean(data_proc))
        # Test statistic R_i: largest absolute deviation from the mean,
        # scaled by the sample standard deviation.
        Rs[i] = np.max(abs_d) / np.std(data_proc, ddof=1)
        argmax = np.argmax(abs_d)
        # Cumulative sets of candidate-outlier indices (in the original
        # sorted array), one set per tested i.
        lms = ms[-1] if ms else []
        ms.append(lms + [remaining[argmax]])
        # Critical value lambda_i from the t distribution (Rosner 1983).
        p = 1 - alpha / (2 * (n - i))
        df = n - i - 2
        t_ppr = t.ppf(p, df)
        ls[i] = ((n - i - 1) * t_ppr) / np.sqrt((n - i - 2 + t_ppr**2) * (n - i))
        # Remove the observation that maximizes |x - mean| and keep the
        # original-index map in sync.
        data_proc = np.delete(data_proc, argmax)
        remaining = np.delete(remaining, argmax)

    if report:
        lines = ["H0: no outliers in the data",
                 "Ha: up to " + str(outliers) + " outliers in the data",
                 "Significance level: α = " + str(alpha),
                 "Reject H0 if Ri > Critical Value (λi)", "",
                 "Summary Table for Two-Tailed Test",
                 "---------------------------------------",
                 " Exact Test Critical",
                 " Number of Statistic Value, λi",
                 "Outliers, i Value, Ri 5 %",
                 "---------------------------------------"]
        for i, (r, l) in enumerate(zip(Rs, ls)):
            lines.append('{: >11s}'.format(str(i + 1)) +
                         '{: >15s}'.format(str(np.round(r, 3))) +
                         '{: >13s}'.format(str(np.round(l, 3))) +
                         (" *" if r > l else ""))
        print("\n".join(lines))
    else:
        # Remove the i* most deviant observations, where i* is the
        # largest i whose test statistic exceeds its critical value.
        if np.any(Rs > ls):
            data = np.delete(data, ms[np.max(np.where(Rs > ls))])
        return data
constant[
The generalized (Extreme Studentized Deviate) ESD test is used
to detect one or more outliers in a univariate data set that follows
an approximately normal distribution [1]_.
Parameters
----------
data : array_like or ndarray, 1d
An array, any object exposing the array interface, containing
data to test for outliers.
outliers : int, optional
Number of potential outliers to test for. Test is two-tailed, i.e.
maximum and minimum values are checked for potential outliers.
report : bool, optional
Specifies whether to return a summary table of the test.
Available options are:
1) True - return a summary table
2) False - return the array with outliers removed. (default)
alpha : float, optional
Significance level for a hypothesis test. Default is 0.05.
Returns
-------
Numpy array if hypo is False or a bool value of a hypothesis test result.
Notes
-----
.. [1] Rosner, Bernard (May 1983), Percentage Points for a Generalized
ESD Many-Outlier Procedure,Technometrics, 25(2), pp. 165-172.
Examples
--------
>>> data = np.array([-0.25, 0.68, 0.94, 1.15, 1.2, 1.26, 1.26, 1.34, 1.38, 1.43, 1.49, 1.49, 1.55, 1.56, 1.58, 1.65, 1.69, 1.7, 1.76, 1.77, 1.81, 1.91, 1.94, 1.96, 1.99, 2.06, 2.09, 2.1, 2.14, 2.15, 2.23, 2.24, 2.26, 2.35, 2.37, 2.4, 2.47, 2.54, 2.62, 2.64, 2.9, 2.92, 2.92, 2.93, 3.21, 3.26, 3.3, 3.59, 3.68, 4.3, 4.64, 5.34, 5.42, 6.01])
>>> outliers_gesd(data, 5)
array([-0.25, 0.68, 0.94, 1.15, 1.2 , 1.26, 1.26, 1.34, 1.38,
1.43, 1.49, 1.49, 1.55, 1.56, 1.58, 1.65, 1.69, 1.7 ,
1.76, 1.77, 1.81, 1.91, 1.94, 1.96, 1.99, 2.06, 2.09,
2.1 , 2.14, 2.15, 2.23, 2.24, 2.26, 2.35, 2.37, 2.4 ,
2.47, 2.54, 2.62, 2.64, 2.9 , 2.92, 2.92, 2.93, 3.21,
3.26, 3.3 , 3.59, 3.68, 4.3 , 4.64])
>>> outliers_gesd(data, outliers = 5, report = True)
H0: no outliers in the data
Ha: up to 5 outliers in the data
Significance level: α = 0.05
Reject H0 if Ri > Critical Value (λi)
Summary Table for Two-Tailed Test
---------------------------------------
Exact Test Critical
Number of Statistic Value, λi
Outliers, i Value, Ri 5 %
---------------------------------------
1 3.119 3.159
2 2.943 3.151
3 3.179 3.144 *
4 2.81 3.136
5 2.816 3.128
]
<ast.Tuple object at 0x7da1b12c0f40> assign[=] tuple[[<ast.Call object at 0x7da1b12c1540>, <ast.Call object at 0x7da1b12c31f0>]]
variable[ms] assign[=] list[[]]
variable[data] assign[=] call[name[np].sort, parameter[call[name[np].array, parameter[name[data]]]]]
variable[data_proc] assign[=] call[name[np].copy, parameter[name[data]]]
variable[n] assign[=] name[data_proc].size
variable[mean] assign[=] call[name[np].mean, parameter[name[data_proc]]]
for taget[name[i]] in starred[call[name[np].arange, parameter[name[outliers]]]] begin[:]
variable[abs_d] assign[=] call[name[np].abs, parameter[binary_operation[name[data_proc] - call[name[np].mean, parameter[name[data_proc]]]]]]
variable[R] assign[=] binary_operation[call[name[np].max, parameter[name[abs_d]]] / call[name[np].std, parameter[name[data_proc]]]]
call[name[Rs]][name[i]] assign[=] name[R]
variable[lms] assign[=] <ast.IfExp object at 0x7da1b1242920>
call[name[ms].append, parameter[binary_operation[name[lms] + list[[<ast.Call object at 0x7da1b1241480>]]]]]
variable[p] assign[=] binary_operation[constant[1] - binary_operation[name[alpha] / binary_operation[constant[2] * binary_operation[name[n] - name[i]]]]]
variable[df] assign[=] binary_operation[binary_operation[name[n] - name[i]] - constant[2]]
variable[t_ppr] assign[=] call[name[t].ppf, parameter[name[p], name[df]]]
variable[lambd] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[n] - name[i]] - constant[1]] * name[t_ppr]] / call[name[np].sqrt, parameter[binary_operation[binary_operation[binary_operation[binary_operation[name[n] - name[i]] - constant[2]] + binary_operation[name[t_ppr] ** constant[2]]] * binary_operation[name[n] - name[i]]]]]]
call[name[ls]][name[i]] assign[=] name[lambd]
variable[data_proc] assign[=] call[name[np].delete, parameter[name[data_proc], call[name[np].argmax, parameter[name[abs_d]]]]]
if name[report] begin[:]
variable[report] assign[=] list[[<ast.Constant object at 0x7da1b1241a20>, <ast.BinOp object at 0x7da1b1242200>, <ast.BinOp object at 0x7da1b1240610>, <ast.Constant object at 0x7da1b1242c50>, <ast.Constant object at 0x7da1b12430a0>, <ast.Constant object at 0x7da1b1240fa0>, <ast.Constant object at 0x7da1b1240700>, <ast.Constant object at 0x7da1b1240e80>, <ast.Constant object at 0x7da1b1242110>, <ast.Constant object at 0x7da1b1242260>, <ast.Constant object at 0x7da1b1240910>]]
for taget[tuple[[<ast.Name object at 0x7da1b1241bd0>, <ast.Tuple object at 0x7da1b1241c90>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[Rs], name[ls]]]]]] begin[:]
call[name[report].append, parameter[binary_operation[binary_operation[binary_operation[call[constant[{: >11s}].format, parameter[call[name[str], parameter[binary_operation[name[i] + constant[1]]]]]] + call[constant[{: >15s}].format, parameter[call[name[str], parameter[call[name[np].round, parameter[name[r], constant[3]]]]]]]] + call[constant[{: >13s}].format, parameter[call[name[str], parameter[call[name[np].round, parameter[name[l], constant[3]]]]]]]] + <ast.IfExp object at 0x7da1b12b4b50>]]]
call[name[print], parameter[call[constant[
].join, parameter[name[report]]]]] | keyword[def] identifier[outliers_gesd] ( identifier[data] , identifier[outliers] = literal[int] , identifier[report] = keyword[False] , identifier[alpha] = literal[int] ):
literal[string]
identifier[Rs] , identifier[ls] = identifier[np] . identifier[zeros] ( identifier[outliers] , identifier[dtype] = identifier[np] . identifier[float] ), identifier[np] . identifier[zeros] ( identifier[outliers] , identifier[dtype] = identifier[np] . identifier[float] )
identifier[ms] =[]
identifier[data] = identifier[np] . identifier[sort] ( identifier[np] . identifier[array] ( identifier[data] ))
identifier[data_proc] = identifier[np] . identifier[copy] ( identifier[data] )
identifier[n] = identifier[data_proc] . identifier[size]
identifier[mean] = identifier[np] . identifier[mean] ( identifier[data_proc] )
keyword[for] identifier[i] keyword[in] identifier[np] . identifier[arange] ( identifier[outliers] ):
identifier[abs_d] = identifier[np] . identifier[abs] ( identifier[data_proc] - identifier[np] . identifier[mean] ( identifier[data_proc] ))
identifier[R] = identifier[np] . identifier[max] ( identifier[abs_d] )/ identifier[np] . identifier[std] ( identifier[data_proc] , identifier[ddof] = literal[int] )
identifier[Rs] [ identifier[i] ]= identifier[R]
identifier[lms] = identifier[ms] [- literal[int] ] keyword[if] identifier[len] ( identifier[ms] )> literal[int] keyword[else] []
identifier[ms] . identifier[append] ( identifier[lms] +[ identifier[np] . identifier[argmax] ( identifier[abs_d] )])
identifier[p] = literal[int] - identifier[alpha] /( literal[int] *( identifier[n] - identifier[i] ))
identifier[df] = identifier[n] - identifier[i] - literal[int]
identifier[t_ppr] = identifier[t] . identifier[ppf] ( identifier[p] , identifier[df] )
identifier[lambd] =(( identifier[n] - identifier[i] - literal[int] )* identifier[t_ppr] )/ identifier[np] . identifier[sqrt] (( identifier[n] - identifier[i] - literal[int] + identifier[t_ppr] ** literal[int] )*( identifier[n] - identifier[i] ))
identifier[ls] [ identifier[i] ]= identifier[lambd]
identifier[data_proc] = identifier[np] . identifier[delete] ( identifier[data_proc] , identifier[np] . identifier[argmax] ( identifier[abs_d] ))
keyword[if] identifier[report] :
identifier[report] =[ literal[string] ,
literal[string] + identifier[str] ( identifier[outliers] )+ literal[string] ,
literal[string] + identifier[str] ( identifier[alpha] ),
literal[string] , literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ]
keyword[for] identifier[i] ,( identifier[r] , identifier[l] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[Rs] , identifier[ls] )):
identifier[report] . identifier[append] ( literal[string] . identifier[format] ( identifier[str] ( identifier[i] + literal[int] ))+ literal[string] . identifier[format] ( identifier[str] ( identifier[np] . identifier[round] ( identifier[r] , literal[int] )))+ literal[string] . identifier[format] ( identifier[str] ( identifier[np] . identifier[round] ( identifier[l] , literal[int] )))+( literal[string] keyword[if] identifier[r] > identifier[l] keyword[else] literal[string] ))
identifier[print] ( literal[string] . identifier[join] ( identifier[report] ))
keyword[else] :
keyword[if] identifier[any] ( identifier[Rs] > identifier[ls] ):
identifier[data] = identifier[np] . identifier[delete] ( identifier[data] , identifier[ms] [ identifier[np] . identifier[max] ( identifier[np] . identifier[where] ( identifier[Rs] > identifier[ls] ))])
keyword[return] identifier[data] | def outliers_gesd(data, outliers=5, report=False, alpha=0.05):
"""
The generalized (Extreme Studentized Deviate) ESD test is used
to detect one or more outliers in a univariate data set that follows
an approximately normal distribution [1]_.
Parameters
----------
data : array_like or ndarray, 1d
An array, any object exposing the array interface, containing
data to test for outliers.
outliers : int, optional
Number of potential outliers to test for. Test is two-tailed, i.e.
maximum and minimum values are checked for potential outliers.
report : bool, optional
Specifies whether to return a summary table of the test.
Available options are:
1) True - return a summary table
2) False - return the array with outliers removed. (default)
alpha : float, optional
Significance level for a hypothesis test. Default is 0.05.
Returns
-------
Numpy array if hypo is False or a bool value of a hypothesis test result.
Notes
-----
.. [1] Rosner, Bernard (May 1983), Percentage Points for a Generalized
ESD Many-Outlier Procedure,Technometrics, 25(2), pp. 165-172.
Examples
--------
>>> data = np.array([-0.25, 0.68, 0.94, 1.15, 1.2, 1.26, 1.26, 1.34, 1.38, 1.43, 1.49, 1.49, 1.55, 1.56, 1.58, 1.65, 1.69, 1.7, 1.76, 1.77, 1.81, 1.91, 1.94, 1.96, 1.99, 2.06, 2.09, 2.1, 2.14, 2.15, 2.23, 2.24, 2.26, 2.35, 2.37, 2.4, 2.47, 2.54, 2.62, 2.64, 2.9, 2.92, 2.92, 2.93, 3.21, 3.26, 3.3, 3.59, 3.68, 4.3, 4.64, 5.34, 5.42, 6.01])
>>> outliers_gesd(data, 5)
array([-0.25, 0.68, 0.94, 1.15, 1.2 , 1.26, 1.26, 1.34, 1.38,
1.43, 1.49, 1.49, 1.55, 1.56, 1.58, 1.65, 1.69, 1.7 ,
1.76, 1.77, 1.81, 1.91, 1.94, 1.96, 1.99, 2.06, 2.09,
2.1 , 2.14, 2.15, 2.23, 2.24, 2.26, 2.35, 2.37, 2.4 ,
2.47, 2.54, 2.62, 2.64, 2.9 , 2.92, 2.92, 2.93, 3.21,
3.26, 3.3 , 3.59, 3.68, 4.3 , 4.64])
>>> outliers_gesd(data, outliers = 5, report = True)
H0: no outliers in the data
Ha: up to 5 outliers in the data
Significance level: α = 0.05
Reject H0 if Ri > Critical Value (λi)
Summary Table for Two-Tailed Test
---------------------------------------
Exact Test Critical
Number of Statistic Value, λi
Outliers, i Value, Ri 5 %
---------------------------------------
1 3.119 3.159
2 2.943 3.151
3 3.179 3.144 *
4 2.81 3.136
5 2.816 3.128
"""
(Rs, ls) = (np.zeros(outliers, dtype=np.float), np.zeros(outliers, dtype=np.float))
ms = []
data = np.sort(np.array(data))
data_proc = np.copy(data)
n = data_proc.size
mean = np.mean(data_proc)
for i in np.arange(outliers):
abs_d = np.abs(data_proc - np.mean(data_proc))
# R-value calculation
R = np.max(abs_d) / np.std(data_proc, ddof=1)
Rs[i] = R
# Masked values
lms = ms[-1] if len(ms) > 0 else []
ms.append(lms + [np.argmax(abs_d)])
# Lambdas calculation
p = 1 - alpha / (2 * (n - i))
df = n - i - 2
t_ppr = t.ppf(p, df)
lambd = (n - i - 1) * t_ppr / np.sqrt((n - i - 2 + t_ppr ** 2) * (n - i))
ls[i] = lambd
# Remove the observation that maximizes |xi − xmean|
data_proc = np.delete(data_proc, np.argmax(abs_d)) # depends on [control=['for'], data=['i']]
if report:
report = ['H0: no outliers in the data', 'Ha: up to ' + str(outliers) + ' outliers in the data', 'Significance level: α = ' + str(alpha), 'Reject H0 if Ri > Critical Value (λi)', '', 'Summary Table for Two-Tailed Test', '---------------------------------------', ' Exact Test Critical', ' Number of Statistic Value, λi', 'Outliers, i Value, Ri 5 %', '---------------------------------------']
for (i, (r, l)) in enumerate(zip(Rs, ls)):
report.append('{: >11s}'.format(str(i + 1)) + '{: >15s}'.format(str(np.round(r, 3))) + '{: >13s}'.format(str(np.round(l, 3))) + (' *' if r > l else '')) # depends on [control=['for'], data=[]]
print('\n'.join(report)) # depends on [control=['if'], data=[]]
else:
# Remove masked values
# for which the test statistic is greater
# than the critical value and return the result
if any(Rs > ls):
data = np.delete(data, ms[np.max(np.where(Rs > ls))]) # depends on [control=['if'], data=[]]
return data |
def get_semester_title(self, node: BaseNode):
    """Return the semester title for the course that *node* belongs to.

    Resolves the course's semester id through ``_get_semester_from_id``.

    :param node: node whose course's semester is looked up
    :return: the resolved semester title, as produced by
        ``_get_semester_from_id`` (presumably a string -- confirm there)
    """
    # Lazy %-style args: the message is only formatted when DEBUG logging
    # is actually enabled (the original formatted it eagerly with `%`).
    log.debug("Getting Semester Title for %s", node.course.id)
    return self._get_semester_from_id(node.course.semester)
constant[
get the semester of a node
]
call[name[log].debug, parameter[binary_operation[constant[Getting Semester Title for %s] <ast.Mod object at 0x7da2590d6920> name[node].course.id]]]
return[call[name[self]._get_semester_from_id, parameter[name[node].course.semester]]] | keyword[def] identifier[get_semester_title] ( identifier[self] , identifier[node] : identifier[BaseNode] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] % identifier[node] . identifier[course] . identifier[id] )
keyword[return] identifier[self] . identifier[_get_semester_from_id] ( identifier[node] . identifier[course] . identifier[semester] ) | def get_semester_title(self, node: BaseNode):
"""
get the semester of a node
"""
log.debug('Getting Semester Title for %s' % node.course.id)
return self._get_semester_from_id(node.course.semester) |
def face_angles_sparse(mesh):
    """
    A sparse matrix representation of the face angles.
    Returns
    ----------
    sparse: scipy.sparse.coo_matrix with:
            dtype: float
            shape: (len(mesh.vertices), len(mesh.faces))
    """
    # Reuse the (row, col) sparsity pattern of the vertex-face incidence
    # matrix and fill its entries with the corresponding face angles.
    angles = mesh.face_angles.flatten()
    incidence = mesh.faces_sparse
    return coo_matrix((angles, (incidence.row, incidence.col)),
                      incidence.shape)
constant[
A sparse matrix representation of the face angles.
Returns
----------
sparse: scipy.sparse.coo_matrix with:
dtype: float
shape: (len(mesh.vertices), len(mesh.faces))
]
variable[matrix] assign[=] call[name[coo_matrix], parameter[tuple[[<ast.Call object at 0x7da20c7cb100>, <ast.Tuple object at 0x7da20c7ca9b0>]], name[mesh].faces_sparse.shape]]
return[name[matrix]] | keyword[def] identifier[face_angles_sparse] ( identifier[mesh] ):
literal[string]
identifier[matrix] = identifier[coo_matrix] (( identifier[mesh] . identifier[face_angles] . identifier[flatten] (),
( identifier[mesh] . identifier[faces_sparse] . identifier[row] , identifier[mesh] . identifier[faces_sparse] . identifier[col] )),
identifier[mesh] . identifier[faces_sparse] . identifier[shape] )
keyword[return] identifier[matrix] | def face_angles_sparse(mesh):
"""
A sparse matrix representation of the face angles.
Returns
----------
sparse: scipy.sparse.coo_matrix with:
dtype: float
shape: (len(mesh.vertices), len(mesh.faces))
"""
matrix = coo_matrix((mesh.face_angles.flatten(), (mesh.faces_sparse.row, mesh.faces_sparse.col)), mesh.faces_sparse.shape)
return matrix |
def config_mode(self, config_command="sudo su"):
    """Attempt to become root.

    :param config_command: command used to elevate privileges
        (default ``"sudo su"``)
    :return: device output accumulated while elevating (empty string if
        already in config mode)
    :raises ValueError: if configuration mode could not be entered
    """
    delay_factor = self.select_delay_factor(delay_factor=1)
    output = ""
    if not self.check_config_mode():
        output += self.send_command_timing(
            config_command, strip_prompt=False, strip_command=False
        )
        if "Password:" in output:
            # NOTE(review): this overwrites the accumulated output with
            # write_channel()'s return value -- looks intentional in the
            # original, but verify callers do not rely on earlier output.
            output = self.write_channel(self.normalize_cmd(self.secret))
        self.set_prompt(prompt_terminator="#")
        time.sleep(1 * delay_factor)
        self.set_base_prompt()
        if not self.check_config_mode():
            # Fixed grammar: was "Failed to configuration mode".
            raise ValueError("Failed to enter configuration mode")
    return output
constant[Attempt to become root.]
variable[delay_factor] assign[=] call[name[self].select_delay_factor, parameter[]]
variable[output] assign[=] constant[]
if <ast.UnaryOp object at 0x7da2054a7970> begin[:]
<ast.AugAssign object at 0x7da2054a5d20>
if compare[constant[Password:] in name[output]] begin[:]
variable[output] assign[=] call[name[self].write_channel, parameter[call[name[self].normalize_cmd, parameter[name[self].secret]]]]
call[name[self].set_prompt, parameter[]]
call[name[time].sleep, parameter[binary_operation[constant[1] * name[delay_factor]]]]
call[name[self].set_base_prompt, parameter[]]
if <ast.UnaryOp object at 0x7da2054a4430> begin[:]
<ast.Raise object at 0x7da2054a4af0>
return[name[output]] | keyword[def] identifier[config_mode] ( identifier[self] , identifier[config_command] = literal[string] ):
literal[string]
identifier[delay_factor] = identifier[self] . identifier[select_delay_factor] ( identifier[delay_factor] = literal[int] )
identifier[output] = literal[string]
keyword[if] keyword[not] identifier[self] . identifier[check_config_mode] ():
identifier[output] += identifier[self] . identifier[send_command_timing] (
identifier[config_command] , identifier[strip_prompt] = keyword[False] , identifier[strip_command] = keyword[False]
)
keyword[if] literal[string] keyword[in] identifier[output] :
identifier[output] = identifier[self] . identifier[write_channel] ( identifier[self] . identifier[normalize_cmd] ( identifier[self] . identifier[secret] ))
identifier[self] . identifier[set_prompt] ( identifier[prompt_terminator] = literal[string] )
identifier[time] . identifier[sleep] ( literal[int] * identifier[delay_factor] )
identifier[self] . identifier[set_base_prompt] ()
keyword[if] keyword[not] identifier[self] . identifier[check_config_mode] ():
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[output] | def config_mode(self, config_command='sudo su'):
"""Attempt to become root."""
delay_factor = self.select_delay_factor(delay_factor=1)
output = ''
if not self.check_config_mode():
output += self.send_command_timing(config_command, strip_prompt=False, strip_command=False)
if 'Password:' in output:
output = self.write_channel(self.normalize_cmd(self.secret)) # depends on [control=['if'], data=['output']]
self.set_prompt(prompt_terminator='#')
time.sleep(1 * delay_factor)
self.set_base_prompt()
if not self.check_config_mode():
raise ValueError('Failed to configuration mode') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return output |
def opt_restore(prefix, opts):
    """
    Given a dict of opts, return a copy with the given prefix prepended
    to each key.
    """
    prefixed = {}
    for key, value in opts.items():
        prefixed[prefix + key] = value
    return prefixed
constant[
Given a dict of opts, add the given prefix to each key
]
return[<ast.DictComp object at 0x7da1b16a9870>] | keyword[def] identifier[opt_restore] ( identifier[prefix] , identifier[opts] ):
literal[string]
keyword[return] { identifier[prefix] + identifier[name] : identifier[value] keyword[for] identifier[name] , identifier[value] keyword[in] identifier[opts] . identifier[items] ()} | def opt_restore(prefix, opts):
"""
Given a dict of opts, add the given prefix to each key
"""
return {prefix + name: value for (name, value) in opts.items()} |
def deserialise(self, content) -> Element:
    """
    Deserialises the given compact JSON into an element.
    >>> deserialiser = CompactJSONDeserialiser()
    >>> deserialiser.deserialise('["string", null, null, "Hi"]')
    String(content='Hi')
    """
    parsed = json.loads(content)
    if isinstance(parsed, list):
        return self.deserialise_element(parsed)
    raise ValueError('Given content was not compact JSON refract')
constant[
Deserialises the given compact JSON into an element.
>>> deserialiser = CompactJSONDeserialiser()
>>> deserialiser.deserialise('["string", null, null, "Hi"]')
String(content='Hi')
]
variable[content] assign[=] call[name[json].loads, parameter[name[content]]]
if <ast.UnaryOp object at 0x7da1b198c640> begin[:]
<ast.Raise object at 0x7da1b198e3b0>
return[call[name[self].deserialise_element, parameter[name[content]]]] | keyword[def] identifier[deserialise] ( identifier[self] , identifier[content] )-> identifier[Element] :
literal[string]
identifier[content] = identifier[json] . identifier[loads] ( identifier[content] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[content] , identifier[list] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[self] . identifier[deserialise_element] ( identifier[content] ) | def deserialise(self, content) -> Element:
"""
Deserialises the given compact JSON into an element.
>>> deserialiser = CompactJSONDeserialiser()
>>> deserialiser.deserialise('["string", null, null, "Hi"]')
String(content='Hi')
"""
content = json.loads(content)
if not isinstance(content, list):
raise ValueError('Given content was not compact JSON refract') # depends on [control=['if'], data=[]]
return self.deserialise_element(content) |
def _compute_or_skip_on_error(calc, compute_kwargs):
"""Execute the Calc, catching and logging exceptions, but don't re-raise.
Prevents one failed calculation from stopping a larger requested set
of calculations.
"""
try:
return calc.compute(**compute_kwargs)
except Exception:
msg = ("Skipping aospy calculation `{0}` due to error with the "
"following traceback: \n{1}")
logging.warning(msg.format(calc, traceback.format_exc()))
return None | def function[_compute_or_skip_on_error, parameter[calc, compute_kwargs]]:
constant[Execute the Calc, catching and logging exceptions, but don't re-raise.
Prevents one failed calculation from stopping a larger requested set
of calculations.
]
<ast.Try object at 0x7da1b04d0a90> | keyword[def] identifier[_compute_or_skip_on_error] ( identifier[calc] , identifier[compute_kwargs] ):
literal[string]
keyword[try] :
keyword[return] identifier[calc] . identifier[compute] (** identifier[compute_kwargs] )
keyword[except] identifier[Exception] :
identifier[msg] =( literal[string]
literal[string] )
identifier[logging] . identifier[warning] ( identifier[msg] . identifier[format] ( identifier[calc] , identifier[traceback] . identifier[format_exc] ()))
keyword[return] keyword[None] | def _compute_or_skip_on_error(calc, compute_kwargs):
"""Execute the Calc, catching and logging exceptions, but don't re-raise.
Prevents one failed calculation from stopping a larger requested set
of calculations.
"""
try:
return calc.compute(**compute_kwargs) # depends on [control=['try'], data=[]]
except Exception:
msg = 'Skipping aospy calculation `{0}` due to error with the following traceback: \n{1}'
logging.warning(msg.format(calc, traceback.format_exc()))
return None # depends on [control=['except'], data=[]] |
def list_deployments(jboss_config):
    '''
    List all deployments on the jboss instance
    jboss_config
        Configuration dictionary with properties specified above.
    CLI Example:
    .. code-block:: bash
        salt '*' jboss7.list_deployments '{"cli_path": "integration.modules.sysmod.SysModuleTest.test_valid_docs", "controller": "10.11.12.13:9999", "cli_user": "jbossadm", "cli_password": "jbossadm"}'
    '''
    log.debug("======================== MODULE FUNCTION: jboss7.list_deployments")
    command_result = __salt__['jboss7_cli.run_command'](jboss_config, 'deploy')
    deployments = []
    if command_result['stdout']:
        # str.split() with no argument splits on runs of whitespace and
        # drops empty tokens.  The previous re.split('\\s*', ...) splits
        # between *every* character on Python 3.7+ (zero-width matches
        # are allowed there), which mangled the deployment names.
        deployments = command_result['stdout'].split()
    log.debug('deployments=%s', deployments)
    return deployments
constant[
List all deployments on the jboss instance
jboss_config
Configuration dictionary with properties specified above.
CLI Example:
.. code-block:: bash
salt '*' jboss7.list_deployments '{"cli_path": "integration.modules.sysmod.SysModuleTest.test_valid_docs", "controller": "10.11.12.13:9999", "cli_user": "jbossadm", "cli_password": "jbossadm"}'
]
call[name[log].debug, parameter[constant[======================== MODULE FUNCTION: jboss7.list_deployments]]]
variable[command_result] assign[=] call[call[name[__salt__]][constant[jboss7_cli.run_command]], parameter[name[jboss_config], constant[deploy]]]
variable[deployments] assign[=] list[[]]
if call[name[command_result]][constant[stdout]] begin[:]
variable[deployments] assign[=] call[name[re].split, parameter[constant[\s*], call[name[command_result]][constant[stdout]]]]
call[name[log].debug, parameter[constant[deployments=%s], name[deployments]]]
return[name[deployments]] | keyword[def] identifier[list_deployments] ( identifier[jboss_config] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] )
identifier[command_result] = identifier[__salt__] [ literal[string] ]( identifier[jboss_config] , literal[string] )
identifier[deployments] =[]
keyword[if] identifier[command_result] [ literal[string] ]:
identifier[deployments] = identifier[re] . identifier[split] ( literal[string] , identifier[command_result] [ literal[string] ])
identifier[log] . identifier[debug] ( literal[string] , identifier[deployments] )
keyword[return] identifier[deployments] | def list_deployments(jboss_config):
"""
List all deployments on the jboss instance
jboss_config
Configuration dictionary with properties specified above.
CLI Example:
.. code-block:: bash
salt '*' jboss7.list_deployments '{"cli_path": "integration.modules.sysmod.SysModuleTest.test_valid_docs", "controller": "10.11.12.13:9999", "cli_user": "jbossadm", "cli_password": "jbossadm"}'
"""
log.debug('======================== MODULE FUNCTION: jboss7.list_deployments')
command_result = __salt__['jboss7_cli.run_command'](jboss_config, 'deploy')
deployments = []
if command_result['stdout']:
deployments = re.split('\\s*', command_result['stdout']) # depends on [control=['if'], data=[]]
log.debug('deployments=%s', deployments)
return deployments |
def all_elements(self):
    """returns all public BasePageElements grouped by this element and its parent(s)
    :rtype: list[(str, BasePageElement)]
    """
    public_elements = []
    for attr_name, attr_value in get_members_safety(self.__class__):
        # Skip private members; keep only page-element attributes.
        if attr_name.startswith("_"):
            continue
        if isinstance(attr_value, BasePageElement):
            public_elements.append((attr_name, attr_value))
    return public_elements
constant[returns all public BasePageElements grouped by this element and it parent(s)
:rtype: list[(str, BasePageElement)]
]
return[<ast.ListComp object at 0x7da1b13087f0>] | keyword[def] identifier[all_elements] ( identifier[self] ):
literal[string]
keyword[return] [( identifier[k] , identifier[getattr] ( identifier[self] , identifier[k] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[get_members_safety] ( identifier[self] . identifier[__class__] )
keyword[if] keyword[not] identifier[k] . identifier[startswith] ( literal[string] ) keyword[and] identifier[isinstance] ( identifier[v] ,( identifier[BasePageElement] ,))] | def all_elements(self):
"""returns all public BasePageElements grouped by this element and it parent(s)
:rtype: list[(str, BasePageElement)]
"""
return [(k, getattr(self, k)) for (k, v) in get_members_safety(self.__class__) if not k.startswith('_') and isinstance(v, (BasePageElement,))] |
def login(self, request, extra_context=None):
    """
    Displays the login form for the given HttpRequest.
    """
    context = {
        'title': _('Log in'),
        'app_path': request.get_full_path(),
    }
    # Preserve the requested destination if the client did not supply one.
    redirect_supplied = (REDIRECT_FIELD_NAME in request.GET or
                         REDIRECT_FIELD_NAME in request.POST)
    if not redirect_supplied:
        context[REDIRECT_FIELD_NAME] = request.get_full_path()
    context.update(extra_context or {})
    defaults = {
        'extra_context': context,
        'current_app': self.name,
        'authentication_form': self.login_form or AdminAuthenticationForm,
        'template_name': self.login_template or 'admin/login.html',
    }
    # Delegates to the module-level login view, not to this method.
    return login(request, **defaults)
constant[
Displays the login form for the given HttpRequest.
]
variable[context] assign[=] dictionary[[<ast.Constant object at 0x7da18dc9bd90>, <ast.Constant object at 0x7da18dc9b6d0>], [<ast.Call object at 0x7da18dc9b010>, <ast.Call object at 0x7da18dc99e10>]]
if <ast.BoolOp object at 0x7da18dc987f0> begin[:]
call[name[context]][name[REDIRECT_FIELD_NAME]] assign[=] call[name[request].get_full_path, parameter[]]
call[name[context].update, parameter[<ast.BoolOp object at 0x7da18dc99870>]]
variable[defaults] assign[=] dictionary[[<ast.Constant object at 0x7da18dc9b5e0>, <ast.Constant object at 0x7da18dc9ae00>, <ast.Constant object at 0x7da18dc99120>, <ast.Constant object at 0x7da18dc9bbe0>], [<ast.Name object at 0x7da18dc98e50>, <ast.Attribute object at 0x7da18dc991e0>, <ast.BoolOp object at 0x7da18dc99bd0>, <ast.BoolOp object at 0x7da18dc99390>]]
return[call[name[login], parameter[name[request]]]] | keyword[def] identifier[login] ( identifier[self] , identifier[request] , identifier[extra_context] = keyword[None] ):
literal[string]
identifier[context] ={
literal[string] : identifier[_] ( literal[string] ),
literal[string] : identifier[request] . identifier[get_full_path] (),
}
keyword[if] ( identifier[REDIRECT_FIELD_NAME] keyword[not] keyword[in] identifier[request] . identifier[GET] keyword[and]
identifier[REDIRECT_FIELD_NAME] keyword[not] keyword[in] identifier[request] . identifier[POST] ):
identifier[context] [ identifier[REDIRECT_FIELD_NAME] ]= identifier[request] . identifier[get_full_path] ()
identifier[context] . identifier[update] ( identifier[extra_context] keyword[or] {})
identifier[defaults] ={
literal[string] : identifier[context] ,
literal[string] : identifier[self] . identifier[name] ,
literal[string] : identifier[self] . identifier[login_form] keyword[or] identifier[AdminAuthenticationForm] ,
literal[string] : identifier[self] . identifier[login_template] keyword[or] literal[string] ,
}
keyword[return] identifier[login] ( identifier[request] ,** identifier[defaults] ) | def login(self, request, extra_context=None):
"""
Displays the login form for the given HttpRequest.
"""
context = {'title': _('Log in'), 'app_path': request.get_full_path()}
if REDIRECT_FIELD_NAME not in request.GET and REDIRECT_FIELD_NAME not in request.POST:
context[REDIRECT_FIELD_NAME] = request.get_full_path() # depends on [control=['if'], data=[]]
context.update(extra_context or {})
defaults = {'extra_context': context, 'current_app': self.name, 'authentication_form': self.login_form or AdminAuthenticationForm, 'template_name': self.login_template or 'admin/login.html'}
return login(request, **defaults) |
def column_structural_typicality(X_L_list, col_id):
    """Returns how typical column is (opposite of how anomalous)."""
    num_models = len(X_L_list)
    num_cols = len(X_L_list[0]['column_partition']['assignments'])
    matches = 0
    for X_L in X_L_list:
        assignments = X_L['column_partition']['assignments']
        target = assignments[col_id]
        # Count columns sharing the target column's view assignment
        # (the column itself always matches).
        matches += sum(1 for assignment in assignments
                       if assignment == target)
    return float(matches) / (num_models * num_cols)
constant[Returns how typical column is (opposite of how anomalous).]
variable[count] assign[=] constant[0]
for taget[name[X_L]] in starred[name[X_L_list]] begin[:]
for taget[name[c]] in starred[call[name[range], parameter[call[name[len], parameter[call[call[name[X_L]][constant[column_partition]]][constant[assignments]]]]]]] begin[:]
if compare[call[call[call[name[X_L]][constant[column_partition]]][constant[assignments]]][name[col_id]] equal[==] call[call[call[name[X_L]][constant[column_partition]]][constant[assignments]]][name[c]]] begin[:]
<ast.AugAssign object at 0x7da1b28202e0>
return[binary_operation[call[name[float], parameter[name[count]]] / binary_operation[call[name[len], parameter[name[X_L_list]]] * call[name[len], parameter[call[call[call[name[X_L_list]][constant[0]]][constant[column_partition]]][constant[assignments]]]]]]] | keyword[def] identifier[column_structural_typicality] ( identifier[X_L_list] , identifier[col_id] ):
literal[string]
identifier[count] = literal[int]
keyword[for] identifier[X_L] keyword[in] identifier[X_L_list] :
keyword[for] identifier[c] keyword[in] identifier[range] ( identifier[len] ( identifier[X_L] [ literal[string] ][ literal[string] ])):
keyword[if] identifier[X_L] [ literal[string] ][ literal[string] ][ identifier[col_id] ]== identifier[X_L] [ literal[string] ][ literal[string] ][ identifier[c] ]:
identifier[count] += literal[int]
keyword[return] identifier[float] ( identifier[count] )/( identifier[len] ( identifier[X_L_list] )* identifier[len] ( identifier[X_L_list] [ literal[int] ][ literal[string] ][ literal[string] ])) | def column_structural_typicality(X_L_list, col_id):
"""Returns how typical column is (opposite of how anomalous)."""
count = 0
for X_L in X_L_list:
for c in range(len(X_L['column_partition']['assignments'])):
if X_L['column_partition']['assignments'][col_id] == X_L['column_partition']['assignments'][c]:
count += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on [control=['for'], data=['X_L']]
return float(count) / (len(X_L_list) * len(X_L_list[0]['column_partition']['assignments'])) |
def set_type(self, agent_type):
    """Set the type of agent to spawn in Holodeck. Currently accepted agents are: DiscreteSphereAgent, UAVAgent,
    and AndroidAgent.
    Args:
        agent_type (str): The type of agent to spawn.
    """
    # Map the friendly agent name to its wire keyword before sending.
    self.add_string_parameters(SpawnAgentCommand.__type_keys[agent_type])
constant[Set the type of agent to spawn in Holodeck. Currently accepted agents are: DiscreteSphereAgent, UAVAgent,
and AndroidAgent.
Args:
agent_type (str): The type of agent to spawn.
]
variable[type_str] assign[=] call[name[SpawnAgentCommand].__type_keys][name[agent_type]]
call[name[self].add_string_parameters, parameter[name[type_str]]] | keyword[def] identifier[set_type] ( identifier[self] , identifier[agent_type] ):
literal[string]
identifier[type_str] = identifier[SpawnAgentCommand] . identifier[__type_keys] [ identifier[agent_type] ]
identifier[self] . identifier[add_string_parameters] ( identifier[type_str] ) | def set_type(self, agent_type):
"""Set the type of agent to spawn in Holodeck. Currently accepted agents are: DiscreteSphereAgent, UAVAgent,
and AndroidAgent.
Args:
agent_type (str): The type of agent to spawn.
"""
type_str = SpawnAgentCommand.__type_keys[agent_type]
self.add_string_parameters(type_str) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.