| code (string, lengths 75–104k) | code_sememe (string, lengths 47–309k) | token_type (string, lengths 215–214k) | code_dependency (string, lengths 75–155k) |
|---|---|---|---|
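Each row below pairs a Python source snippet (`code`) with three machine-derived views of it: an AST-style semantic rendering (`code_sememe`), the same source with every token tagged as `keyword`/`identifier`/`literal` (`token_type`), and the source re-emitted with `# depends on [control=...]` comments marking control dependencies (`code_dependency`). The tooling that actually produced these columns is not documented here; the sketch below is only a minimal, hypothetical illustration, using Python's standard `ast` module and made-up rendering rules, of how a `code_sememe`-like view can be derived from the `code` column.

```python
import ast

def sememe(node):
    """Render a tiny, illustrative subset of Python AST nodes in a
    code_sememe-like notation; anything unhandled falls back to ast.dump.
    (Hypothetical rules for illustration only, not the dataset's pipeline.)"""
    if isinstance(node, ast.Module):
        return "\n".join(sememe(stmt) for stmt in node.body)
    if isinstance(node, ast.FunctionDef):
        params = ", ".join(a.arg for a in node.args.args)
        body = "\n".join("    " + sememe(stmt) for stmt in node.body)
        return f"def function[{node.name}, parameter[{params}]]:\n{body}"
    if isinstance(node, ast.Assign):
        return f"variable[{sememe(node.targets[0])}] assign[=] {sememe(node.value)}"
    if isinstance(node, ast.Return):
        return f"return[{sememe(node.value)}]"
    if isinstance(node, ast.BinOp):
        op = {ast.Add: "+", ast.Sub: "-", ast.Mult: "*"}.get(type(node.op), "?")
        return f"binary_operation[{sememe(node.left)} {op} {sememe(node.right)}]"
    if isinstance(node, ast.Call):
        args = ", ".join(sememe(a) for a in node.args)
        return f"call[{sememe(node.func)}, parameter[{args}]]"
    if isinstance(node, ast.Attribute):
        return f"{sememe(node.value)}.{node.attr}"
    if isinstance(node, ast.Name):
        return f"name[{node.id}]"
    if isinstance(node, ast.Constant):
        return f"constant[{node.value!r}]"
    return ast.dump(node)

src = "def double(x):\n    y = x + x\n    return y"
print(sememe(ast.parse(src)))
# Output:
# def function[double, parameter[x]]:
#     variable[name[y]] assign[=] binary_operation[name[x] + name[x]]
#     return[name[y]]
```

The real `code_sememe` and `code_dependency` cells are richer than this sketch (they keep string literals as `literal[string]`, include object addresses for some nodes, and append `# depends on [control=...]` annotations), so the code above only gestures at the shape of the mapping rather than reproducing it.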
def _parse_control_fields(self, fields, tag_id="tag"):
"""
Parse control fields.
Args:
fields (list): list of HTMLElements
tag_id (str): parameter name, which holds the information, about
field name this is normally "tag", but in case of
oai_marc "id".
"""
for field in fields:
params = field.params
# skip tags without parameters
if tag_id not in params:
continue
self.controlfields[params[tag_id]] = field.getContent().strip()
|
def function[_parse_control_fields, parameter[self, fields, tag_id]]:
constant[
Parse control fields.
Args:
fields (list): list of HTMLElements
tag_id (str): parameter name, which holds the information, about
field name this is normally "tag", but in case of
oai_marc "id".
]
for taget[name[field]] in starred[name[fields]] begin[:]
variable[params] assign[=] name[field].params
if compare[name[tag_id] <ast.NotIn object at 0x7da2590d7190> name[params]] begin[:]
continue
call[name[self].controlfields][call[name[params]][name[tag_id]]] assign[=] call[call[name[field].getContent, parameter[]].strip, parameter[]]
|
keyword[def] identifier[_parse_control_fields] ( identifier[self] , identifier[fields] , identifier[tag_id] = literal[string] ):
literal[string]
keyword[for] identifier[field] keyword[in] identifier[fields] :
identifier[params] = identifier[field] . identifier[params]
keyword[if] identifier[tag_id] keyword[not] keyword[in] identifier[params] :
keyword[continue]
identifier[self] . identifier[controlfields] [ identifier[params] [ identifier[tag_id] ]]= identifier[field] . identifier[getContent] (). identifier[strip] ()
|
def _parse_control_fields(self, fields, tag_id='tag'):
"""
Parse control fields.
Args:
fields (list): list of HTMLElements
tag_id (str): parameter name, which holds the information, about
field name this is normally "tag", but in case of
oai_marc "id".
"""
for field in fields:
params = field.params
# skip tags without parameters
if tag_id not in params:
continue # depends on [control=['if'], data=[]]
self.controlfields[params[tag_id]] = field.getContent().strip() # depends on [control=['for'], data=['field']]
|
def init(globalvars=None, show=False):
"""
Load profile INI
"""
global config
profileini = getprofileini()
if os.path.exists(profileini):
config = configparser.ConfigParser()
config.read(profileini)
mgr = plugins_get_mgr()
mgr.update_configs(config)
if show:
for source in config:
print("[%s] :" %(source))
for k in config[source]:
print(" %s : %s" % (k, config[source][k]))
else:
print("Profile does not exist. So creating one")
if not show:
update(globalvars)
print("Complete init")
|
def function[init, parameter[globalvars, show]]:
constant[
Load profile INI
]
<ast.Global object at 0x7da1b008cf40>
variable[profileini] assign[=] call[name[getprofileini], parameter[]]
if call[name[os].path.exists, parameter[name[profileini]]] begin[:]
variable[config] assign[=] call[name[configparser].ConfigParser, parameter[]]
call[name[config].read, parameter[name[profileini]]]
variable[mgr] assign[=] call[name[plugins_get_mgr], parameter[]]
call[name[mgr].update_configs, parameter[name[config]]]
if name[show] begin[:]
for taget[name[source]] in starred[name[config]] begin[:]
call[name[print], parameter[binary_operation[constant[[%s] :] <ast.Mod object at 0x7da2590d6920> name[source]]]]
for taget[name[k]] in starred[call[name[config]][name[source]]] begin[:]
call[name[print], parameter[binary_operation[constant[ %s : %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1afe0e110>, <ast.Subscript object at 0x7da1afe0d3c0>]]]]]
call[name[print], parameter[constant[Complete init]]]
|
keyword[def] identifier[init] ( identifier[globalvars] = keyword[None] , identifier[show] = keyword[False] ):
literal[string]
keyword[global] identifier[config]
identifier[profileini] = identifier[getprofileini] ()
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[profileini] ):
identifier[config] = identifier[configparser] . identifier[ConfigParser] ()
identifier[config] . identifier[read] ( identifier[profileini] )
identifier[mgr] = identifier[plugins_get_mgr] ()
identifier[mgr] . identifier[update_configs] ( identifier[config] )
keyword[if] identifier[show] :
keyword[for] identifier[source] keyword[in] identifier[config] :
identifier[print] ( literal[string] %( identifier[source] ))
keyword[for] identifier[k] keyword[in] identifier[config] [ identifier[source] ]:
identifier[print] ( literal[string] %( identifier[k] , identifier[config] [ identifier[source] ][ identifier[k] ]))
keyword[else] :
identifier[print] ( literal[string] )
keyword[if] keyword[not] identifier[show] :
identifier[update] ( identifier[globalvars] )
identifier[print] ( literal[string] )
|
def init(globalvars=None, show=False):
"""
Load profile INI
"""
global config
profileini = getprofileini()
if os.path.exists(profileini):
config = configparser.ConfigParser()
config.read(profileini)
mgr = plugins_get_mgr()
mgr.update_configs(config)
if show:
for source in config:
print('[%s] :' % source)
for k in config[source]:
print(' %s : %s' % (k, config[source][k])) # depends on [control=['for'], data=['k']] # depends on [control=['for'], data=['source']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
print('Profile does not exist. So creating one')
if not show:
update(globalvars) # depends on [control=['if'], data=[]]
print('Complete init')
|
def on_epoch_end(self, epoch, **kwargs:Any)->None:
"Compare the value monitored to its best and maybe reduce lr."
current = self.get_monitor_value()
if current is None: return
if self.operator(current - self.min_delta, self.best): self.best,self.wait = current,0
else:
self.wait += 1
if self.wait > self.patience:
self.opt.lr *= self.factor
self.wait = 0
print(f'Epoch {epoch}: reducing lr to {self.opt.lr}')
|
def function[on_epoch_end, parameter[self, epoch]]:
constant[Compare the value monitored to its best and maybe reduce lr.]
variable[current] assign[=] call[name[self].get_monitor_value, parameter[]]
if compare[name[current] is constant[None]] begin[:]
return[None]
if call[name[self].operator, parameter[binary_operation[name[current] - name[self].min_delta], name[self].best]] begin[:]
<ast.Tuple object at 0x7da20cabf790> assign[=] tuple[[<ast.Name object at 0x7da20cabe890>, <ast.Constant object at 0x7da20cabd0c0>]]
|
keyword[def] identifier[on_epoch_end] ( identifier[self] , identifier[epoch] ,** identifier[kwargs] : identifier[Any] )-> keyword[None] :
literal[string]
identifier[current] = identifier[self] . identifier[get_monitor_value] ()
keyword[if] identifier[current] keyword[is] keyword[None] : keyword[return]
keyword[if] identifier[self] . identifier[operator] ( identifier[current] - identifier[self] . identifier[min_delta] , identifier[self] . identifier[best] ): identifier[self] . identifier[best] , identifier[self] . identifier[wait] = identifier[current] , literal[int]
keyword[else] :
identifier[self] . identifier[wait] += literal[int]
keyword[if] identifier[self] . identifier[wait] > identifier[self] . identifier[patience] :
identifier[self] . identifier[opt] . identifier[lr] *= identifier[self] . identifier[factor]
identifier[self] . identifier[wait] = literal[int]
identifier[print] ( literal[string] )
|
def on_epoch_end(self, epoch, **kwargs: Any) -> None:
"""Compare the value monitored to its best and maybe reduce lr."""
current = self.get_monitor_value()
if current is None:
return # depends on [control=['if'], data=[]]
if self.operator(current - self.min_delta, self.best):
(self.best, self.wait) = (current, 0) # depends on [control=['if'], data=[]]
else:
self.wait += 1
if self.wait > self.patience:
self.opt.lr *= self.factor
self.wait = 0
print(f'Epoch {epoch}: reducing lr to {self.opt.lr}') # depends on [control=['if'], data=[]]
|
def circuit_to_quirk_url(circuit: circuits.Circuit,
prefer_unknown_gate_to_failure: bool=False,
escape_url=True) -> str:
"""Returns a Quirk URL for the given circuit.
Args:
circuit: The circuit to open in Quirk.
prefer_unknown_gate_to_failure: If not set, gates that fail to convert
will cause this function to raise an error. If set, a URL
containing bad gates will be generated. (Quirk will open the URL,
and replace the bad gates with parse errors, but still get the rest
of the circuit.)
escape_url: If set, the generated URL will have special characters such
as quotes escaped using %. This makes it possible to paste the URL
into forums and the command line and etc and have it properly
parse. If not set, the generated URL will be more compact and human
readable (and can still be pasted directly into a browser's address
bar).
Returns:
"""
circuit = circuit.copy()
linearize_circuit_qubits(circuit)
cols = [] # Type: List[List[Any]]
for moment in circuit:
can_merges = []
for op in moment.operations:
for col, can_merge in _to_quirk_cols(
op,
prefer_unknown_gate_to_failure):
if can_merge:
can_merges.append(col)
else:
cols.append(col)
if can_merges:
merged_col = [1] * max(len(e) for e in can_merges)
for col in can_merges:
for i in range(len(col)):
if col[i] != 1:
merged_col[i] = col[i]
cols.append(merged_col)
circuit_json = json.JSONEncoder(ensure_ascii=False,
separators=(',', ':'),
sort_keys=True).encode({'cols': cols})
if escape_url:
suffix = urllib.parse.quote(circuit_json)
else:
suffix = circuit_json
return 'http://algassert.com/quirk#circuit={}'.format(suffix)
|
def function[circuit_to_quirk_url, parameter[circuit, prefer_unknown_gate_to_failure, escape_url]]:
constant[Returns a Quirk URL for the given circuit.
Args:
circuit: The circuit to open in Quirk.
prefer_unknown_gate_to_failure: If not set, gates that fail to convert
will cause this function to raise an error. If set, a URL
containing bad gates will be generated. (Quirk will open the URL,
and replace the bad gates with parse errors, but still get the rest
of the circuit.)
escape_url: If set, the generated URL will have special characters such
as quotes escaped using %. This makes it possible to paste the URL
into forums and the command line and etc and have it properly
parse. If not set, the generated URL will be more compact and human
readable (and can still be pasted directly into a browser's address
bar).
Returns:
]
variable[circuit] assign[=] call[name[circuit].copy, parameter[]]
call[name[linearize_circuit_qubits], parameter[name[circuit]]]
variable[cols] assign[=] list[[]]
for taget[name[moment]] in starred[name[circuit]] begin[:]
variable[can_merges] assign[=] list[[]]
for taget[name[op]] in starred[name[moment].operations] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1ce5060>, <ast.Name object at 0x7da1b1ce4c10>]]] in starred[call[name[_to_quirk_cols], parameter[name[op], name[prefer_unknown_gate_to_failure]]]] begin[:]
if name[can_merge] begin[:]
call[name[can_merges].append, parameter[name[col]]]
if name[can_merges] begin[:]
variable[merged_col] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b1ce4730>]] * call[name[max], parameter[<ast.GeneratorExp object at 0x7da1b1ce5d50>]]]
for taget[name[col]] in starred[name[can_merges]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[col]]]]]] begin[:]
if compare[call[name[col]][name[i]] not_equal[!=] constant[1]] begin[:]
call[name[merged_col]][name[i]] assign[=] call[name[col]][name[i]]
call[name[cols].append, parameter[name[merged_col]]]
variable[circuit_json] assign[=] call[call[name[json].JSONEncoder, parameter[]].encode, parameter[dictionary[[<ast.Constant object at 0x7da1b1ce4910>], [<ast.Name object at 0x7da1b1ce6500>]]]]
if name[escape_url] begin[:]
variable[suffix] assign[=] call[name[urllib].parse.quote, parameter[name[circuit_json]]]
return[call[constant[http://algassert.com/quirk#circuit={}].format, parameter[name[suffix]]]]
|
keyword[def] identifier[circuit_to_quirk_url] ( identifier[circuit] : identifier[circuits] . identifier[Circuit] ,
identifier[prefer_unknown_gate_to_failure] : identifier[bool] = keyword[False] ,
identifier[escape_url] = keyword[True] )-> identifier[str] :
literal[string]
identifier[circuit] = identifier[circuit] . identifier[copy] ()
identifier[linearize_circuit_qubits] ( identifier[circuit] )
identifier[cols] =[]
keyword[for] identifier[moment] keyword[in] identifier[circuit] :
identifier[can_merges] =[]
keyword[for] identifier[op] keyword[in] identifier[moment] . identifier[operations] :
keyword[for] identifier[col] , identifier[can_merge] keyword[in] identifier[_to_quirk_cols] (
identifier[op] ,
identifier[prefer_unknown_gate_to_failure] ):
keyword[if] identifier[can_merge] :
identifier[can_merges] . identifier[append] ( identifier[col] )
keyword[else] :
identifier[cols] . identifier[append] ( identifier[col] )
keyword[if] identifier[can_merges] :
identifier[merged_col] =[ literal[int] ]* identifier[max] ( identifier[len] ( identifier[e] ) keyword[for] identifier[e] keyword[in] identifier[can_merges] )
keyword[for] identifier[col] keyword[in] identifier[can_merges] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[col] )):
keyword[if] identifier[col] [ identifier[i] ]!= literal[int] :
identifier[merged_col] [ identifier[i] ]= identifier[col] [ identifier[i] ]
identifier[cols] . identifier[append] ( identifier[merged_col] )
identifier[circuit_json] = identifier[json] . identifier[JSONEncoder] ( identifier[ensure_ascii] = keyword[False] ,
identifier[separators] =( literal[string] , literal[string] ),
identifier[sort_keys] = keyword[True] ). identifier[encode] ({ literal[string] : identifier[cols] })
keyword[if] identifier[escape_url] :
identifier[suffix] = identifier[urllib] . identifier[parse] . identifier[quote] ( identifier[circuit_json] )
keyword[else] :
identifier[suffix] = identifier[circuit_json]
keyword[return] literal[string] . identifier[format] ( identifier[suffix] )
|
def circuit_to_quirk_url(circuit: circuits.Circuit, prefer_unknown_gate_to_failure: bool=False, escape_url=True) -> str:
"""Returns a Quirk URL for the given circuit.
Args:
circuit: The circuit to open in Quirk.
prefer_unknown_gate_to_failure: If not set, gates that fail to convert
will cause this function to raise an error. If set, a URL
containing bad gates will be generated. (Quirk will open the URL,
and replace the bad gates with parse errors, but still get the rest
of the circuit.)
escape_url: If set, the generated URL will have special characters such
as quotes escaped using %. This makes it possible to paste the URL
into forums and the command line and etc and have it properly
parse. If not set, the generated URL will be more compact and human
readable (and can still be pasted directly into a browser's address
bar).
Returns:
"""
circuit = circuit.copy()
linearize_circuit_qubits(circuit)
cols = [] # Type: List[List[Any]]
for moment in circuit:
can_merges = []
for op in moment.operations:
for (col, can_merge) in _to_quirk_cols(op, prefer_unknown_gate_to_failure):
if can_merge:
can_merges.append(col) # depends on [control=['if'], data=[]]
else:
cols.append(col) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['op']]
if can_merges:
merged_col = [1] * max((len(e) for e in can_merges))
for col in can_merges:
for i in range(len(col)):
if col[i] != 1:
merged_col[i] = col[i] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['col']]
cols.append(merged_col) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['moment']]
circuit_json = json.JSONEncoder(ensure_ascii=False, separators=(',', ':'), sort_keys=True).encode({'cols': cols})
if escape_url:
suffix = urllib.parse.quote(circuit_json) # depends on [control=['if'], data=[]]
else:
suffix = circuit_json
return 'http://algassert.com/quirk#circuit={}'.format(suffix)
|
def get_resources_by_bins(self, bin_ids):
"""Gets the list of ``Resources`` corresponding to a list of ``Bins``.
arg: bin_ids (osid.id.IdList): list of bin ``Ids``
return: (osid.resource.ResourceList) - list of resources
raise: NullArgument - ``bin_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resources_by_bins
resource_list = []
for bin_id in bin_ids:
resource_list += list(
self.get_resources_by_bin(bin_id))
return objects.ResourceList(resource_list)
|
def function[get_resources_by_bins, parameter[self, bin_ids]]:
constant[Gets the list of ``Resources`` corresponding to a list of ``Bins``.
arg: bin_ids (osid.id.IdList): list of bin ``Ids``
return: (osid.resource.ResourceList) - list of resources
raise: NullArgument - ``bin_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
variable[resource_list] assign[=] list[[]]
for taget[name[bin_id]] in starred[name[bin_ids]] begin[:]
<ast.AugAssign object at 0x7da20e955ba0>
return[call[name[objects].ResourceList, parameter[name[resource_list]]]]
|
keyword[def] identifier[get_resources_by_bins] ( identifier[self] , identifier[bin_ids] ):
literal[string]
identifier[resource_list] =[]
keyword[for] identifier[bin_id] keyword[in] identifier[bin_ids] :
identifier[resource_list] += identifier[list] (
identifier[self] . identifier[get_resources_by_bin] ( identifier[bin_id] ))
keyword[return] identifier[objects] . identifier[ResourceList] ( identifier[resource_list] )
|
def get_resources_by_bins(self, bin_ids):
"""Gets the list of ``Resources`` corresponding to a list of ``Bins``.
arg: bin_ids (osid.id.IdList): list of bin ``Ids``
return: (osid.resource.ResourceList) - list of resources
raise: NullArgument - ``bin_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resources_by_bins
resource_list = []
for bin_id in bin_ids:
resource_list += list(self.get_resources_by_bin(bin_id)) # depends on [control=['for'], data=['bin_id']]
return objects.ResourceList(resource_list)
|
def texture_array(self, size, components, data=None, *, alignment=1, dtype='f1') -> 'TextureArray':
'''
Create a :py:class:`TextureArray` object.
Args:
size (tuple): The ``(width, height, layers)`` of the texture.
components (int): The number of components 1, 2, 3 or 4.
data (bytes): Content of the texture. The size must be ``(width, height * layers)``
so each layer is stacked vertically.
Keyword Args:
alignment (int): The byte alignment 1, 2, 4 or 8.
dtype (str): Data type.
Returns:
:py:class:`Texture3D` object
'''
res = TextureArray.__new__(TextureArray)
res.mglo, res._glo = self.mglo.texture_array(size, components, data, alignment, dtype)
res._size = size
res._components = components
res._dtype = dtype
res.ctx = self
res.extra = None
return res
|
def function[texture_array, parameter[self, size, components, data]]:
constant[
Create a :py:class:`TextureArray` object.
Args:
size (tuple): The ``(width, height, layers)`` of the texture.
components (int): The number of components 1, 2, 3 or 4.
data (bytes): Content of the texture. The size must be ``(width, height * layers)``
so each layer is stacked vertically.
Keyword Args:
alignment (int): The byte alignment 1, 2, 4 or 8.
dtype (str): Data type.
Returns:
:py:class:`Texture3D` object
]
variable[res] assign[=] call[name[TextureArray].__new__, parameter[name[TextureArray]]]
<ast.Tuple object at 0x7da18f8101c0> assign[=] call[name[self].mglo.texture_array, parameter[name[size], name[components], name[data], name[alignment], name[dtype]]]
name[res]._size assign[=] name[size]
name[res]._components assign[=] name[components]
name[res]._dtype assign[=] name[dtype]
name[res].ctx assign[=] name[self]
name[res].extra assign[=] constant[None]
return[name[res]]
|
keyword[def] identifier[texture_array] ( identifier[self] , identifier[size] , identifier[components] , identifier[data] = keyword[None] ,*, identifier[alignment] = literal[int] , identifier[dtype] = literal[string] )-> literal[string] :
literal[string]
identifier[res] = identifier[TextureArray] . identifier[__new__] ( identifier[TextureArray] )
identifier[res] . identifier[mglo] , identifier[res] . identifier[_glo] = identifier[self] . identifier[mglo] . identifier[texture_array] ( identifier[size] , identifier[components] , identifier[data] , identifier[alignment] , identifier[dtype] )
identifier[res] . identifier[_size] = identifier[size]
identifier[res] . identifier[_components] = identifier[components]
identifier[res] . identifier[_dtype] = identifier[dtype]
identifier[res] . identifier[ctx] = identifier[self]
identifier[res] . identifier[extra] = keyword[None]
keyword[return] identifier[res]
|
def texture_array(self, size, components, data=None, *, alignment=1, dtype='f1') -> 'TextureArray':
"""
Create a :py:class:`TextureArray` object.
Args:
size (tuple): The ``(width, height, layers)`` of the texture.
components (int): The number of components 1, 2, 3 or 4.
data (bytes): Content of the texture. The size must be ``(width, height * layers)``
so each layer is stacked vertically.
Keyword Args:
alignment (int): The byte alignment 1, 2, 4 or 8.
dtype (str): Data type.
Returns:
:py:class:`Texture3D` object
"""
res = TextureArray.__new__(TextureArray)
(res.mglo, res._glo) = self.mglo.texture_array(size, components, data, alignment, dtype)
res._size = size
res._components = components
res._dtype = dtype
res.ctx = self
res.extra = None
return res
|
def register_plugin_dir(path):
"""Find plugins in given directory"""
import glob
for f in glob.glob(path + '/*.py'):
for k, v in load_plugins_from_module(f).items():
if k:
global_registry[k] = v
|
def function[register_plugin_dir, parameter[path]]:
constant[Find plugins in given directory]
import module[glob]
for taget[name[f]] in starred[call[name[glob].glob, parameter[binary_operation[name[path] + constant[/*.py]]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b18b76d0>, <ast.Name object at 0x7da1b18b67a0>]]] in starred[call[call[name[load_plugins_from_module], parameter[name[f]]].items, parameter[]]] begin[:]
if name[k] begin[:]
call[name[global_registry]][name[k]] assign[=] name[v]
|
keyword[def] identifier[register_plugin_dir] ( identifier[path] ):
literal[string]
keyword[import] identifier[glob]
keyword[for] identifier[f] keyword[in] identifier[glob] . identifier[glob] ( identifier[path] + literal[string] ):
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[load_plugins_from_module] ( identifier[f] ). identifier[items] ():
keyword[if] identifier[k] :
identifier[global_registry] [ identifier[k] ]= identifier[v]
|
def register_plugin_dir(path):
"""Find plugins in given directory"""
import glob
for f in glob.glob(path + '/*.py'):
for (k, v) in load_plugins_from_module(f).items():
if k:
global_registry[k] = v # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['f']]
|
def load(schema, uri=None, spec=None, provider=None):
"""Scaffold a validator against a schema.
:param schema: the schema to compile into a Validator
:type schema: Mapping
:param uri: the uri of the schema.
it may be ignored in case of not cross
referencing.
:type uri: Pointer, str
:param spec: fallback to this spec if the schema does not provides ts own
:type spec: str
:param provider: the other schemas, in case of cross
referencing
:type provider: Mapping, Provider...
"""
factory = Factory(provider, spec)
return factory(schema, uri or '#')
|
def function[load, parameter[schema, uri, spec, provider]]:
constant[Scaffold a validator against a schema.
:param schema: the schema to compile into a Validator
:type schema: Mapping
:param uri: the uri of the schema.
it may be ignored in case of not cross
referencing.
:type uri: Pointer, str
:param spec: fallback to this spec if the schema does not provides ts own
:type spec: str
:param provider: the other schemas, in case of cross
referencing
:type provider: Mapping, Provider...
]
variable[factory] assign[=] call[name[Factory], parameter[name[provider], name[spec]]]
return[call[name[factory], parameter[name[schema], <ast.BoolOp object at 0x7da1b23712a0>]]]
|
keyword[def] identifier[load] ( identifier[schema] , identifier[uri] = keyword[None] , identifier[spec] = keyword[None] , identifier[provider] = keyword[None] ):
literal[string]
identifier[factory] = identifier[Factory] ( identifier[provider] , identifier[spec] )
keyword[return] identifier[factory] ( identifier[schema] , identifier[uri] keyword[or] literal[string] )
|
def load(schema, uri=None, spec=None, provider=None):
"""Scaffold a validator against a schema.
:param schema: the schema to compile into a Validator
:type schema: Mapping
:param uri: the uri of the schema.
it may be ignored in case of not cross
referencing.
:type uri: Pointer, str
:param spec: fallback to this spec if the schema does not provides ts own
:type spec: str
:param provider: the other schemas, in case of cross
referencing
:type provider: Mapping, Provider...
"""
factory = Factory(provider, spec)
return factory(schema, uri or '#')
|
def _parse_canonical_minkey(doc):
"""Decode a JSON MinKey to bson.min_key.MinKey."""
if doc['$minKey'] is not 1:
raise TypeError('$minKey value must be 1: %s' % (doc,))
if len(doc) != 1:
raise TypeError('Bad $minKey, extra field(s): %s' % (doc,))
return MinKey()
|
def function[_parse_canonical_minkey, parameter[doc]]:
constant[Decode a JSON MinKey to bson.min_key.MinKey.]
if compare[call[name[doc]][constant[$minKey]] is_not constant[1]] begin[:]
<ast.Raise object at 0x7da18f00c400>
if compare[call[name[len], parameter[name[doc]]] not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da18f00cf10>
return[call[name[MinKey], parameter[]]]
|
keyword[def] identifier[_parse_canonical_minkey] ( identifier[doc] ):
literal[string]
keyword[if] identifier[doc] [ literal[string] ] keyword[is] keyword[not] literal[int] :
keyword[raise] identifier[TypeError] ( literal[string] %( identifier[doc] ,))
keyword[if] identifier[len] ( identifier[doc] )!= literal[int] :
keyword[raise] identifier[TypeError] ( literal[string] %( identifier[doc] ,))
keyword[return] identifier[MinKey] ()
|
def _parse_canonical_minkey(doc):
"""Decode a JSON MinKey to bson.min_key.MinKey."""
if doc['$minKey'] is not 1:
raise TypeError('$minKey value must be 1: %s' % (doc,)) # depends on [control=['if'], data=[]]
if len(doc) != 1:
raise TypeError('Bad $minKey, extra field(s): %s' % (doc,)) # depends on [control=['if'], data=[]]
return MinKey()
|
def calcHumWealthAndBoundingMPCs(self):
'''
Calculates human wealth and the maximum and minimum MPC for each current
period state, then stores them as attributes of self for use by other methods.
Parameters
----------
none
Returns
-------
none
'''
# Upper bound on MPC at lower m-bound
WorstIncPrb_array = self.BoroCnstDependency*np.tile(np.reshape(self.WorstIncPrbAll,
(1,self.StateCount)),(self.StateCount,1))
temp_array = self.MrkvArray*WorstIncPrb_array
WorstIncPrbNow = np.sum(temp_array,axis=1) # Probability of getting the "worst" income shock and transition from each current state
ExMPCmaxNext = (np.dot(temp_array,self.Rfree_list**(1.0-self.CRRA)*
self.solution_next.MPCmax**(-self.CRRA))/WorstIncPrbNow)**\
(-1.0/self.CRRA)
DiscFacEff_temp = self.DiscFac*self.LivPrb
self.MPCmaxNow = 1.0/(1.0 + ((DiscFacEff_temp*WorstIncPrbNow)**
(1.0/self.CRRA))/ExMPCmaxNext)
self.MPCmaxEff = self.MPCmaxNow
self.MPCmaxEff[self.BoroCnstNat_list < self.mNrmMin_list] = 1.0
# State-conditional PDV of human wealth
hNrmPlusIncNext = self.ExIncNextAll + self.solution_next.hNrm
self.hNrmNow = np.dot(self.MrkvArray,(self.PermGroFac_list/self.Rfree_list)*
hNrmPlusIncNext)
# Lower bound on MPC as m gets arbitrarily large
temp = (DiscFacEff_temp*np.dot(self.MrkvArray,self.solution_next.MPCmin**
(-self.CRRA)*self.Rfree_list**(1.0-self.CRRA)))**(1.0/self.CRRA)
self.MPCminNow = 1.0/(1.0 + temp)
|
def function[calcHumWealthAndBoundingMPCs, parameter[self]]:
constant[
Calculates human wealth and the maximum and minimum MPC for each current
period state, then stores them as attributes of self for use by other methods.
Parameters
----------
none
Returns
-------
none
]
variable[WorstIncPrb_array] assign[=] binary_operation[name[self].BoroCnstDependency * call[name[np].tile, parameter[call[name[np].reshape, parameter[name[self].WorstIncPrbAll, tuple[[<ast.Constant object at 0x7da2049628f0>, <ast.Attribute object at 0x7da204963ee0>]]]], tuple[[<ast.Attribute object at 0x7da204961f30>, <ast.Constant object at 0x7da204960580>]]]]]
variable[temp_array] assign[=] binary_operation[name[self].MrkvArray * name[WorstIncPrb_array]]
variable[WorstIncPrbNow] assign[=] call[name[np].sum, parameter[name[temp_array]]]
variable[ExMPCmaxNext] assign[=] binary_operation[binary_operation[call[name[np].dot, parameter[name[temp_array], binary_operation[binary_operation[name[self].Rfree_list ** binary_operation[constant[1.0] - name[self].CRRA]] * binary_operation[name[self].solution_next.MPCmax ** <ast.UnaryOp object at 0x7da204960b50>]]]] / name[WorstIncPrbNow]] ** binary_operation[<ast.UnaryOp object at 0x7da204962bf0> / name[self].CRRA]]
variable[DiscFacEff_temp] assign[=] binary_operation[name[self].DiscFac * name[self].LivPrb]
name[self].MPCmaxNow assign[=] binary_operation[constant[1.0] / binary_operation[constant[1.0] + binary_operation[binary_operation[binary_operation[name[DiscFacEff_temp] * name[WorstIncPrbNow]] ** binary_operation[constant[1.0] / name[self].CRRA]] / name[ExMPCmaxNext]]]]
name[self].MPCmaxEff assign[=] name[self].MPCmaxNow
call[name[self].MPCmaxEff][compare[name[self].BoroCnstNat_list less[<] name[self].mNrmMin_list]] assign[=] constant[1.0]
variable[hNrmPlusIncNext] assign[=] binary_operation[name[self].ExIncNextAll + name[self].solution_next.hNrm]
name[self].hNrmNow assign[=] call[name[np].dot, parameter[name[self].MrkvArray, binary_operation[binary_operation[name[self].PermGroFac_list / name[self].Rfree_list] * name[hNrmPlusIncNext]]]]
variable[temp] assign[=] binary_operation[binary_operation[name[DiscFacEff_temp] * call[name[np].dot, parameter[name[self].MrkvArray, binary_operation[binary_operation[name[self].solution_next.MPCmin ** <ast.UnaryOp object at 0x7da18c4ce890>] * binary_operation[name[self].Rfree_list ** binary_operation[constant[1.0] - name[self].CRRA]]]]]] ** binary_operation[constant[1.0] / name[self].CRRA]]
name[self].MPCminNow assign[=] binary_operation[constant[1.0] / binary_operation[constant[1.0] + name[temp]]]
|
keyword[def] identifier[calcHumWealthAndBoundingMPCs] ( identifier[self] ):
literal[string]
identifier[WorstIncPrb_array] = identifier[self] . identifier[BoroCnstDependency] * identifier[np] . identifier[tile] ( identifier[np] . identifier[reshape] ( identifier[self] . identifier[WorstIncPrbAll] ,
( literal[int] , identifier[self] . identifier[StateCount] )),( identifier[self] . identifier[StateCount] , literal[int] ))
identifier[temp_array] = identifier[self] . identifier[MrkvArray] * identifier[WorstIncPrb_array]
identifier[WorstIncPrbNow] = identifier[np] . identifier[sum] ( identifier[temp_array] , identifier[axis] = literal[int] )
identifier[ExMPCmaxNext] =( identifier[np] . identifier[dot] ( identifier[temp_array] , identifier[self] . identifier[Rfree_list] **( literal[int] - identifier[self] . identifier[CRRA] )*
identifier[self] . identifier[solution_next] . identifier[MPCmax] **(- identifier[self] . identifier[CRRA] ))/ identifier[WorstIncPrbNow] )**(- literal[int] / identifier[self] . identifier[CRRA] )
identifier[DiscFacEff_temp] = identifier[self] . identifier[DiscFac] * identifier[self] . identifier[LivPrb]
identifier[self] . identifier[MPCmaxNow] = literal[int] /( literal[int] +(( identifier[DiscFacEff_temp] * identifier[WorstIncPrbNow] )**
( literal[int] / identifier[self] . identifier[CRRA] ))/ identifier[ExMPCmaxNext] )
identifier[self] . identifier[MPCmaxEff] = identifier[self] . identifier[MPCmaxNow]
identifier[self] . identifier[MPCmaxEff] [ identifier[self] . identifier[BoroCnstNat_list] < identifier[self] . identifier[mNrmMin_list] ]= literal[int]
identifier[hNrmPlusIncNext] = identifier[self] . identifier[ExIncNextAll] + identifier[self] . identifier[solution_next] . identifier[hNrm]
identifier[self] . identifier[hNrmNow] = identifier[np] . identifier[dot] ( identifier[self] . identifier[MrkvArray] ,( identifier[self] . identifier[PermGroFac_list] / identifier[self] . identifier[Rfree_list] )*
identifier[hNrmPlusIncNext] )
identifier[temp] =( identifier[DiscFacEff_temp] * identifier[np] . identifier[dot] ( identifier[self] . identifier[MrkvArray] , identifier[self] . identifier[solution_next] . identifier[MPCmin] **
(- identifier[self] . identifier[CRRA] )* identifier[self] . identifier[Rfree_list] **( literal[int] - identifier[self] . identifier[CRRA] )))**( literal[int] / identifier[self] . identifier[CRRA] )
identifier[self] . identifier[MPCminNow] = literal[int] /( literal[int] + identifier[temp] )
|
def calcHumWealthAndBoundingMPCs(self):
"""
Calculates human wealth and the maximum and minimum MPC for each current
period state, then stores them as attributes of self for use by other methods.
Parameters
----------
none
Returns
-------
none
"""
# Upper bound on MPC at lower m-bound
WorstIncPrb_array = self.BoroCnstDependency * np.tile(np.reshape(self.WorstIncPrbAll, (1, self.StateCount)), (self.StateCount, 1))
temp_array = self.MrkvArray * WorstIncPrb_array
WorstIncPrbNow = np.sum(temp_array, axis=1) # Probability of getting the "worst" income shock and transition from each current state
ExMPCmaxNext = (np.dot(temp_array, self.Rfree_list ** (1.0 - self.CRRA) * self.solution_next.MPCmax ** (-self.CRRA)) / WorstIncPrbNow) ** (-1.0 / self.CRRA)
DiscFacEff_temp = self.DiscFac * self.LivPrb
self.MPCmaxNow = 1.0 / (1.0 + (DiscFacEff_temp * WorstIncPrbNow) ** (1.0 / self.CRRA) / ExMPCmaxNext)
self.MPCmaxEff = self.MPCmaxNow
self.MPCmaxEff[self.BoroCnstNat_list < self.mNrmMin_list] = 1.0
# State-conditional PDV of human wealth
hNrmPlusIncNext = self.ExIncNextAll + self.solution_next.hNrm
self.hNrmNow = np.dot(self.MrkvArray, self.PermGroFac_list / self.Rfree_list * hNrmPlusIncNext)
# Lower bound on MPC as m gets arbitrarily large
temp = (DiscFacEff_temp * np.dot(self.MrkvArray, self.solution_next.MPCmin ** (-self.CRRA) * self.Rfree_list ** (1.0 - self.CRRA))) ** (1.0 / self.CRRA)
self.MPCminNow = 1.0 / (1.0 + temp)
|
def handle_check(self, command, **options):
"""Check your settings for common misconfigurations"""
passed = True
client = DjangoClient(metrics_interval="0ms")
def is_set(x):
return x and x != "None"
# check if org/app is set:
if is_set(client.config.service_name):
self.write("Service name is set, good job!", green)
else:
passed = False
self.write("Configuration errors detected!", red, ending="\n\n")
self.write(" * SERVICE_NAME not set! ", red, ending="\n")
self.write(CONFIG_EXAMPLE)
# secret token is optional but recommended
if not is_set(client.config.secret_token):
self.write(" * optional SECRET_TOKEN not set", yellow, ending="\n")
self.write("")
server_url = client.config.server_url
if server_url:
parsed_url = urlparse.urlparse(server_url)
if parsed_url.scheme.lower() in ("http", "https"):
# parse netloc, making sure people did not supply basic auth
if "@" in parsed_url.netloc:
credentials, _, path = parsed_url.netloc.rpartition("@")
passed = False
self.write("Configuration errors detected!", red, ending="\n\n")
if ":" in credentials:
self.write(" * SERVER_URL cannot contain authentication " "credentials", red, ending="\n")
else:
self.write(
" * SERVER_URL contains an unexpected at-sign!"
" This is usually used for basic authentication, "
"but the colon is left out",
red,
ending="\n",
)
else:
self.write("SERVER_URL {0} looks fine".format(server_url), green)
# secret token in the clear not recommended
if is_set(client.config.secret_token) and parsed_url.scheme.lower() == "http":
self.write(" * SECRET_TOKEN set but server not using https", yellow, ending="\n")
else:
self.write(
" * SERVER_URL has scheme {0} and we require " "http or https!".format(parsed_url.scheme),
red,
ending="\n",
)
passed = False
else:
self.write("Configuration errors detected!", red, ending="\n\n")
self.write(" * SERVER_URL appears to be empty", red, ending="\n")
passed = False
self.write("")
# check if we're disabled due to DEBUG:
if settings.DEBUG:
if getattr(settings, "ELASTIC_APM", {}).get("DEBUG"):
self.write(
"Note: even though you are running in DEBUG mode, we will "
'send data to the APM Server, because you set ELASTIC_APM["DEBUG"] to '
"True. You can disable ElasticAPM while in DEBUG mode like this"
"\n\n",
yellow,
)
self.write(
" ELASTIC_APM = {\n"
' "DEBUG": False,\n'
" # your other ELASTIC_APM settings\n"
" }"
)
else:
self.write(
"Looks like you're running in DEBUG mode. ElasticAPM will NOT "
"gather any data while DEBUG is set to True.\n\n",
red,
)
self.write(
"If you want to test ElasticAPM while DEBUG is set to True, you"
" can force ElasticAPM to gather data by setting"
' ELASTIC_APM["DEBUG"] to True, like this\n\n'
" ELASTIC_APM = {\n"
' "DEBUG": True,\n'
" # your other ELASTIC_APM settings\n"
" }"
)
passed = False
else:
self.write("DEBUG mode is disabled! Looking good!", green)
self.write("")
# check if middleware is set, and if it is at the first position
middleware_attr = "MIDDLEWARE" if getattr(settings, "MIDDLEWARE", None) is not None else "MIDDLEWARE_CLASSES"
middleware = list(getattr(settings, middleware_attr))
try:
pos = middleware.index("elasticapm.contrib.django.middleware.TracingMiddleware")
if pos == 0:
self.write("Tracing middleware is configured! Awesome!", green)
else:
self.write("Tracing middleware is configured, but not at the first position\n", yellow)
self.write("ElasticAPM works best if you add it at the top of your %s setting" % middleware_attr)
except ValueError:
self.write("Tracing middleware not configured!", red)
self.write(
"\n"
"Add it to your %(name)s setting like this:\n\n"
" %(name)s = (\n"
' "elasticapm.contrib.django.middleware.TracingMiddleware",\n'
" # your other middleware classes\n"
" )\n" % {"name": middleware_attr}
)
self.write("")
if passed:
self.write("Looks like everything should be ready!", green)
else:
self.write("Please fix the above errors.", red)
self.write("")
client.close()
return passed
|
def function[handle_check, parameter[self, command]]:
constant[Check your settings for common misconfigurations]
variable[passed] assign[=] constant[True]
variable[client] assign[=] call[name[DjangoClient], parameter[]]
def function[is_set, parameter[x]]:
return[<ast.BoolOp object at 0x7da1b1b85450>]
if call[name[is_set], parameter[name[client].config.service_name]] begin[:]
call[name[self].write, parameter[constant[Service name is set, good job!], name[green]]]
if <ast.UnaryOp object at 0x7da1b1b84250> begin[:]
call[name[self].write, parameter[constant[ * optional SECRET_TOKEN not set], name[yellow]]]
call[name[self].write, parameter[constant[]]]
variable[server_url] assign[=] name[client].config.server_url
if name[server_url] begin[:]
variable[parsed_url] assign[=] call[name[urlparse].urlparse, parameter[name[server_url]]]
if compare[call[name[parsed_url].scheme.lower, parameter[]] in tuple[[<ast.Constant object at 0x7da1b1b634f0>, <ast.Constant object at 0x7da1b1b63ac0>]]] begin[:]
if compare[constant[@] in name[parsed_url].netloc] begin[:]
<ast.Tuple object at 0x7da1b1b63910> assign[=] call[name[parsed_url].netloc.rpartition, parameter[constant[@]]]
variable[passed] assign[=] constant[False]
call[name[self].write, parameter[constant[Configuration errors detected!], name[red]]]
if compare[constant[:] in name[credentials]] begin[:]
call[name[self].write, parameter[constant[ * SERVER_URL cannot contain authentication credentials], name[red]]]
if <ast.BoolOp object at 0x7da1b1b85c90> begin[:]
call[name[self].write, parameter[constant[ * SECRET_TOKEN set but server not using https], name[yellow]]]
call[name[self].write, parameter[constant[]]]
if name[settings].DEBUG begin[:]
if call[call[name[getattr], parameter[name[settings], constant[ELASTIC_APM], dictionary[[], []]]].get, parameter[constant[DEBUG]]] begin[:]
call[name[self].write, parameter[constant[Note: even though you are running in DEBUG mode, we will send data to the APM Server, because you set ELASTIC_APM["DEBUG"] to True. You can disable ElasticAPM while in DEBUG mode like this
], name[yellow]]]
call[name[self].write, parameter[constant[ ELASTIC_APM = {
"DEBUG": False,
# your other ELASTIC_APM settings
}]]]
call[name[self].write, parameter[constant[]]]
variable[middleware_attr] assign[=] <ast.IfExp object at 0x7da1b1b85ba0>
variable[middleware] assign[=] call[name[list], parameter[call[name[getattr], parameter[name[settings], name[middleware_attr]]]]]
<ast.Try object at 0x7da1b1b841f0>
call[name[self].write, parameter[constant[]]]
if name[passed] begin[:]
call[name[self].write, parameter[constant[Looks like everything should be ready!], name[green]]]
call[name[self].write, parameter[constant[]]]
call[name[client].close, parameter[]]
return[name[passed]]
|
keyword[def] identifier[handle_check] ( identifier[self] , identifier[command] ,** identifier[options] ):
literal[string]
identifier[passed] = keyword[True]
identifier[client] = identifier[DjangoClient] ( identifier[metrics_interval] = literal[string] )
keyword[def] identifier[is_set] ( identifier[x] ):
keyword[return] identifier[x] keyword[and] identifier[x] != literal[string]
keyword[if] identifier[is_set] ( identifier[client] . identifier[config] . identifier[service_name] ):
identifier[self] . identifier[write] ( literal[string] , identifier[green] )
keyword[else] :
identifier[passed] = keyword[False]
identifier[self] . identifier[write] ( literal[string] , identifier[red] , identifier[ending] = literal[string] )
identifier[self] . identifier[write] ( literal[string] , identifier[red] , identifier[ending] = literal[string] )
identifier[self] . identifier[write] ( identifier[CONFIG_EXAMPLE] )
keyword[if] keyword[not] identifier[is_set] ( identifier[client] . identifier[config] . identifier[secret_token] ):
identifier[self] . identifier[write] ( literal[string] , identifier[yellow] , identifier[ending] = literal[string] )
identifier[self] . identifier[write] ( literal[string] )
identifier[server_url] = identifier[client] . identifier[config] . identifier[server_url]
keyword[if] identifier[server_url] :
identifier[parsed_url] = identifier[urlparse] . identifier[urlparse] ( identifier[server_url] )
keyword[if] identifier[parsed_url] . identifier[scheme] . identifier[lower] () keyword[in] ( literal[string] , literal[string] ):
keyword[if] literal[string] keyword[in] identifier[parsed_url] . identifier[netloc] :
identifier[credentials] , identifier[_] , identifier[path] = identifier[parsed_url] . identifier[netloc] . identifier[rpartition] ( literal[string] )
identifier[passed] = keyword[False]
identifier[self] . identifier[write] ( literal[string] , identifier[red] , identifier[ending] = literal[string] )
keyword[if] literal[string] keyword[in] identifier[credentials] :
identifier[self] . identifier[write] ( literal[string] literal[string] , identifier[red] , identifier[ending] = literal[string] )
keyword[else] :
identifier[self] . identifier[write] (
literal[string]
literal[string]
literal[string] ,
identifier[red] ,
identifier[ending] = literal[string] ,
)
keyword[else] :
identifier[self] . identifier[write] ( literal[string] . identifier[format] ( identifier[server_url] ), identifier[green] )
keyword[if] identifier[is_set] ( identifier[client] . identifier[config] . identifier[secret_token] ) keyword[and] identifier[parsed_url] . identifier[scheme] . identifier[lower] ()== literal[string] :
identifier[self] . identifier[write] ( literal[string] , identifier[yellow] , identifier[ending] = literal[string] )
keyword[else] :
identifier[self] . identifier[write] (
literal[string] literal[string] . identifier[format] ( identifier[parsed_url] . identifier[scheme] ),
identifier[red] ,
identifier[ending] = literal[string] ,
)
identifier[passed] = keyword[False]
keyword[else] :
identifier[self] . identifier[write] ( literal[string] , identifier[red] , identifier[ending] = literal[string] )
identifier[self] . identifier[write] ( literal[string] , identifier[red] , identifier[ending] = literal[string] )
identifier[passed] = keyword[False]
identifier[self] . identifier[write] ( literal[string] )
keyword[if] identifier[settings] . identifier[DEBUG] :
keyword[if] identifier[getattr] ( identifier[settings] , literal[string] ,{}). identifier[get] ( literal[string] ):
identifier[self] . identifier[write] (
literal[string]
literal[string]
literal[string]
literal[string] ,
identifier[yellow] ,
)
identifier[self] . identifier[write] (
literal[string]
literal[string]
literal[string]
literal[string]
)
keyword[else] :
identifier[self] . identifier[write] (
literal[string]
literal[string] ,
identifier[red] ,
)
identifier[self] . identifier[write] (
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
)
identifier[passed] = keyword[False]
keyword[else] :
identifier[self] . identifier[write] ( literal[string] , identifier[green] )
identifier[self] . identifier[write] ( literal[string] )
identifier[middleware_attr] = literal[string] keyword[if] identifier[getattr] ( identifier[settings] , literal[string] , keyword[None] ) keyword[is] keyword[not] keyword[None] keyword[else] literal[string]
identifier[middleware] = identifier[list] ( identifier[getattr] ( identifier[settings] , identifier[middleware_attr] ))
keyword[try] :
identifier[pos] = identifier[middleware] . identifier[index] ( literal[string] )
keyword[if] identifier[pos] == literal[int] :
identifier[self] . identifier[write] ( literal[string] , identifier[green] )
keyword[else] :
identifier[self] . identifier[write] ( literal[string] , identifier[yellow] )
identifier[self] . identifier[write] ( literal[string] % identifier[middleware_attr] )
keyword[except] identifier[ValueError] :
identifier[self] . identifier[write] ( literal[string] , identifier[red] )
identifier[self] . identifier[write] (
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] %{ literal[string] : identifier[middleware_attr] }
)
identifier[self] . identifier[write] ( literal[string] )
keyword[if] identifier[passed] :
identifier[self] . identifier[write] ( literal[string] , identifier[green] )
keyword[else] :
identifier[self] . identifier[write] ( literal[string] , identifier[red] )
identifier[self] . identifier[write] ( literal[string] )
identifier[client] . identifier[close] ()
keyword[return] identifier[passed]
|
def handle_check(self, command, **options):
"""Check your settings for common misconfigurations"""
passed = True
client = DjangoClient(metrics_interval='0ms')
def is_set(x):
return x and x != 'None'
# check if org/app is set:
if is_set(client.config.service_name):
self.write('Service name is set, good job!', green) # depends on [control=['if'], data=[]]
else:
passed = False
self.write('Configuration errors detected!', red, ending='\n\n')
self.write(' * SERVICE_NAME not set! ', red, ending='\n')
self.write(CONFIG_EXAMPLE)
# secret token is optional but recommended
if not is_set(client.config.secret_token):
self.write(' * optional SECRET_TOKEN not set', yellow, ending='\n') # depends on [control=['if'], data=[]]
self.write('')
server_url = client.config.server_url
if server_url:
parsed_url = urlparse.urlparse(server_url)
if parsed_url.scheme.lower() in ('http', 'https'):
# parse netloc, making sure people did not supply basic auth
if '@' in parsed_url.netloc:
(credentials, _, path) = parsed_url.netloc.rpartition('@')
passed = False
self.write('Configuration errors detected!', red, ending='\n\n')
if ':' in credentials:
self.write(' * SERVER_URL cannot contain authentication credentials', red, ending='\n') # depends on [control=['if'], data=[]]
else:
self.write(' * SERVER_URL contains an unexpected at-sign! This is usually used for basic authentication, but the colon is left out', red, ending='\n') # depends on [control=['if'], data=[]]
else:
self.write('SERVER_URL {0} looks fine'.format(server_url), green)
# secret token in the clear not recommended
if is_set(client.config.secret_token) and parsed_url.scheme.lower() == 'http':
self.write(' * SECRET_TOKEN set but server not using https', yellow, ending='\n') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
self.write(' * SERVER_URL has scheme {0} and we require http or https!'.format(parsed_url.scheme), red, ending='\n')
passed = False # depends on [control=['if'], data=[]]
else:
self.write('Configuration errors detected!', red, ending='\n\n')
self.write(' * SERVER_URL appears to be empty', red, ending='\n')
passed = False
self.write('')
# check if we're disabled due to DEBUG:
if settings.DEBUG:
if getattr(settings, 'ELASTIC_APM', {}).get('DEBUG'):
self.write('Note: even though you are running in DEBUG mode, we will send data to the APM Server, because you set ELASTIC_APM["DEBUG"] to True. You can disable ElasticAPM while in DEBUG mode like this\n\n', yellow)
self.write(' ELASTIC_APM = {\n "DEBUG": False,\n # your other ELASTIC_APM settings\n }') # depends on [control=['if'], data=[]]
else:
self.write("Looks like you're running in DEBUG mode. ElasticAPM will NOT gather any data while DEBUG is set to True.\n\n", red)
self.write('If you want to test ElasticAPM while DEBUG is set to True, you can force ElasticAPM to gather data by setting ELASTIC_APM["DEBUG"] to True, like this\n\n ELASTIC_APM = {\n "DEBUG": True,\n # your other ELASTIC_APM settings\n }')
passed = False # depends on [control=['if'], data=[]]
else:
self.write('DEBUG mode is disabled! Looking good!', green)
self.write('')
# check if middleware is set, and if it is at the first position
middleware_attr = 'MIDDLEWARE' if getattr(settings, 'MIDDLEWARE', None) is not None else 'MIDDLEWARE_CLASSES'
middleware = list(getattr(settings, middleware_attr))
try:
pos = middleware.index('elasticapm.contrib.django.middleware.TracingMiddleware')
if pos == 0:
self.write('Tracing middleware is configured! Awesome!', green) # depends on [control=['if'], data=[]]
else:
self.write('Tracing middleware is configured, but not at the first position\n', yellow)
self.write('ElasticAPM works best if you add it at the top of your %s setting' % middleware_attr) # depends on [control=['try'], data=[]]
except ValueError:
self.write('Tracing middleware not configured!', red)
self.write('\nAdd it to your %(name)s setting like this:\n\n %(name)s = (\n "elasticapm.contrib.django.middleware.TracingMiddleware",\n # your other middleware classes\n )\n' % {'name': middleware_attr}) # depends on [control=['except'], data=[]]
self.write('')
if passed:
self.write('Looks like everything should be ready!', green) # depends on [control=['if'], data=[]]
else:
self.write('Please fix the above errors.', red)
self.write('')
client.close()
return passed
|
def export_saved_model(self, sess, export_dir, tag_set, signatures):
"""Convenience function to access ``TFNode.export_saved_model`` directly from this object instance."""
TFNode.export_saved_model(sess, export_dir, tag_set, signatures)
|
def function[export_saved_model, parameter[self, sess, export_dir, tag_set, signatures]]:
constant[Convenience function to access ``TFNode.export_saved_model`` directly from this object instance.]
call[name[TFNode].export_saved_model, parameter[name[sess], name[export_dir], name[tag_set], name[signatures]]]
|
keyword[def] identifier[export_saved_model] ( identifier[self] , identifier[sess] , identifier[export_dir] , identifier[tag_set] , identifier[signatures] ):
literal[string]
identifier[TFNode] . identifier[export_saved_model] ( identifier[sess] , identifier[export_dir] , identifier[tag_set] , identifier[signatures] )
|
def export_saved_model(self, sess, export_dir, tag_set, signatures):
"""Convenience function to access ``TFNode.export_saved_model`` directly from this object instance."""
TFNode.export_saved_model(sess, export_dir, tag_set, signatures)
|
def addStyle(w):
"""
Styles the GUI: global fonts and colours.
Parameters
----------
w : tkinter.tk
widget element to style
"""
# access global container in root widget
root = get_root(w)
g = root.globals
fsize = g.cpars['font_size']
family = g.cpars['font_family']
# Default font
g.DEFAULT_FONT = font.nametofont("TkDefaultFont")
g.DEFAULT_FONT.configure(size=fsize, weight='bold', family=family)
w.option_add('*Font', g.DEFAULT_FONT)
# Menu font
g.MENU_FONT = font.nametofont("TkMenuFont")
g.MENU_FONT.configure(family=family)
w.option_add('*Menu.Font', g.MENU_FONT)
# Entry font
g.ENTRY_FONT = font.nametofont("TkTextFont")
g.ENTRY_FONT.configure(size=fsize, family=family)
w.option_add('*Entry.Font', g.ENTRY_FONT)
# position and size
# root.geometry("320x240+325+200")
# Default colours. Note there is a difference between
# specifying 'background' with a capital B or lowercase b
w.option_add('*background', g.COL['main'])
w.option_add('*HighlightBackground', g.COL['main'])
w.config(background=g.COL['main'])
|
def function[addStyle, parameter[w]]:
constant[
Styles the GUI: global fonts and colours.
Parameters
----------
w : tkinter.tk
widget element to style
]
variable[root] assign[=] call[name[get_root], parameter[name[w]]]
variable[g] assign[=] name[root].globals
variable[fsize] assign[=] call[name[g].cpars][constant[font_size]]
variable[family] assign[=] call[name[g].cpars][constant[font_family]]
name[g].DEFAULT_FONT assign[=] call[name[font].nametofont, parameter[constant[TkDefaultFont]]]
call[name[g].DEFAULT_FONT.configure, parameter[]]
call[name[w].option_add, parameter[constant[*Font], name[g].DEFAULT_FONT]]
name[g].MENU_FONT assign[=] call[name[font].nametofont, parameter[constant[TkMenuFont]]]
call[name[g].MENU_FONT.configure, parameter[]]
call[name[w].option_add, parameter[constant[*Menu.Font], name[g].MENU_FONT]]
name[g].ENTRY_FONT assign[=] call[name[font].nametofont, parameter[constant[TkTextFont]]]
call[name[g].ENTRY_FONT.configure, parameter[]]
call[name[w].option_add, parameter[constant[*Entry.Font], name[g].ENTRY_FONT]]
call[name[w].option_add, parameter[constant[*background], call[name[g].COL][constant[main]]]]
call[name[w].option_add, parameter[constant[*HighlightBackground], call[name[g].COL][constant[main]]]]
call[name[w].config, parameter[]]
|
keyword[def] identifier[addStyle] ( identifier[w] ):
literal[string]
identifier[root] = identifier[get_root] ( identifier[w] )
identifier[g] = identifier[root] . identifier[globals]
identifier[fsize] = identifier[g] . identifier[cpars] [ literal[string] ]
identifier[family] = identifier[g] . identifier[cpars] [ literal[string] ]
identifier[g] . identifier[DEFAULT_FONT] = identifier[font] . identifier[nametofont] ( literal[string] )
identifier[g] . identifier[DEFAULT_FONT] . identifier[configure] ( identifier[size] = identifier[fsize] , identifier[weight] = literal[string] , identifier[family] = identifier[family] )
identifier[w] . identifier[option_add] ( literal[string] , identifier[g] . identifier[DEFAULT_FONT] )
identifier[g] . identifier[MENU_FONT] = identifier[font] . identifier[nametofont] ( literal[string] )
identifier[g] . identifier[MENU_FONT] . identifier[configure] ( identifier[family] = identifier[family] )
identifier[w] . identifier[option_add] ( literal[string] , identifier[g] . identifier[MENU_FONT] )
identifier[g] . identifier[ENTRY_FONT] = identifier[font] . identifier[nametofont] ( literal[string] )
identifier[g] . identifier[ENTRY_FONT] . identifier[configure] ( identifier[size] = identifier[fsize] , identifier[family] = identifier[family] )
identifier[w] . identifier[option_add] ( literal[string] , identifier[g] . identifier[ENTRY_FONT] )
identifier[w] . identifier[option_add] ( literal[string] , identifier[g] . identifier[COL] [ literal[string] ])
identifier[w] . identifier[option_add] ( literal[string] , identifier[g] . identifier[COL] [ literal[string] ])
identifier[w] . identifier[config] ( identifier[background] = identifier[g] . identifier[COL] [ literal[string] ])
|
def addStyle(w):
"""
Styles the GUI: global fonts and colours.
Parameters
----------
w : tkinter.tk
widget element to style
"""
# access global container in root widget
root = get_root(w)
g = root.globals
fsize = g.cpars['font_size']
family = g.cpars['font_family']
# Default font
g.DEFAULT_FONT = font.nametofont('TkDefaultFont')
g.DEFAULT_FONT.configure(size=fsize, weight='bold', family=family)
w.option_add('*Font', g.DEFAULT_FONT)
# Menu font
g.MENU_FONT = font.nametofont('TkMenuFont')
g.MENU_FONT.configure(family=family)
w.option_add('*Menu.Font', g.MENU_FONT)
# Entry font
g.ENTRY_FONT = font.nametofont('TkTextFont')
g.ENTRY_FONT.configure(size=fsize, family=family)
w.option_add('*Entry.Font', g.ENTRY_FONT)
# position and size
# root.geometry("320x240+325+200")
# Default colours. Note there is a difference between
# specifying 'background' with a capital B or lowercase b
w.option_add('*background', g.COL['main'])
w.option_add('*HighlightBackground', g.COL['main'])
w.config(background=g.COL['main'])
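# Hedged usage sketch (not part of the original source): addStyle expects a root
# whose .globals object carries cpars['font_size'], cpars['font_family'] and the
# colour table COL, as read above. The snippet below only demonstrates the
# underlying tkinter pattern of resizing a named font and pushing a colour
# through the option database, with made-up values.
import tkinter as tk
from tkinter import font

demo = tk.Tk()
default_font = font.nametofont('TkDefaultFont')
default_font.configure(size=12, weight='bold', family='helvetica')
demo.option_add('*Font', default_font)          # new widgets inherit this font
demo.option_add('*background', '#d9d9d9')       # lowercase 'background' resource
demo.config(background='#d9d9d9')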
|
def electric_field_amplitude_intensity(s0, Isat=16.6889462814,
Omega=1e6, units="ad-hoc"):
"""Return the amplitude of the electric field for saturation parameter.
    This is at a given saturation parameter s0=I/Isat, where by default \
Isat=16.6889462814 W/m^2 is the saturation intensity of the D2 line of \
rubidium for circularly polarized light. Optionally, a frequency scale \
`Omega` can be provided.
>>> print(electric_field_amplitude_intensity(1.0, units="ad-hoc"))
9.0152984553
>>> print(electric_field_amplitude_intensity(1.0, Omega=1.0, units="SI"))
112.135917207
>>> print(electric_field_amplitude_intensity(1.0, units="SI"))
0.000112135917207
"""
E0_sat = sqrt(2*mu0*c*Isat)/Omega
if units == "ad-hoc":
e0 = hbar/(e*a0) # This is the electric field scale.
E0_sat = E0_sat/e0
return E0_sat*sqrt(s0)
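# Hedged sketch (not in the original module): sqrt, mu0, c, hbar, e and a0 are
# assumed to be imported elsewhere in the source; one way to satisfy them with
# numpy/scipy and reproduce the first doctest value:
from numpy import sqrt
from scipy.constants import mu_0 as mu0, c, hbar, e, physical_constants
a0 = physical_constants['Bohr radius'][0]
E0_sat = sqrt(2*mu0*c*16.6889462814)/1e6        # SI field amplitude over Omega=1e6
print(E0_sat/(hbar/(e*a0)))                     # ~9.015, the "ad-hoc" doctest value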
|
def function[electric_field_amplitude_intensity, parameter[s0, Isat, Omega, units]]:
constant[Return the amplitude of the electric field for saturation parameter.
This is at a given saturation parameter s0=I/Isat, where I0 is by default Isat=16.6889462814 m/m^2 is the saturation intensity of the D2 line of rubidium for circularly polarized light. Optionally, a frequency scale `Omega` can be provided.
>>> print(electric_field_amplitude_intensity(1.0, units="ad-hoc"))
9.0152984553
>>> print(electric_field_amplitude_intensity(1.0, Omega=1.0, units="SI"))
112.135917207
>>> print(electric_field_amplitude_intensity(1.0, units="SI"))
0.000112135917207
]
variable[E0_sat] assign[=] binary_operation[call[name[sqrt], parameter[binary_operation[binary_operation[binary_operation[constant[2] * name[mu0]] * name[c]] * name[Isat]]]] / name[Omega]]
if compare[name[units] equal[==] constant[ad-hoc]] begin[:]
variable[e0] assign[=] binary_operation[name[hbar] / binary_operation[name[e] * name[a0]]]
variable[E0_sat] assign[=] binary_operation[name[E0_sat] / name[e0]]
return[binary_operation[name[E0_sat] * call[name[sqrt], parameter[name[s0]]]]]
|
keyword[def] identifier[electric_field_amplitude_intensity] ( identifier[s0] , identifier[Isat] = literal[int] ,
identifier[Omega] = literal[int] , identifier[units] = literal[string] ):
literal[string]
identifier[E0_sat] = identifier[sqrt] ( literal[int] * identifier[mu0] * identifier[c] * identifier[Isat] )/ identifier[Omega]
keyword[if] identifier[units] == literal[string] :
identifier[e0] = identifier[hbar] /( identifier[e] * identifier[a0] )
identifier[E0_sat] = identifier[E0_sat] / identifier[e0]
keyword[return] identifier[E0_sat] * identifier[sqrt] ( identifier[s0] )
|
def electric_field_amplitude_intensity(s0, Isat=16.6889462814, Omega=1000000.0, units='ad-hoc'):
"""Return the amplitude of the electric field for saturation parameter.
    This is at a given saturation parameter s0=I/Isat, where by default Isat=16.6889462814 W/m^2 is the saturation intensity of the D2 line of rubidium for circularly polarized light. Optionally, a frequency scale `Omega` can be provided.
>>> print(electric_field_amplitude_intensity(1.0, units="ad-hoc"))
9.0152984553
>>> print(electric_field_amplitude_intensity(1.0, Omega=1.0, units="SI"))
112.135917207
>>> print(electric_field_amplitude_intensity(1.0, units="SI"))
0.000112135917207
"""
E0_sat = sqrt(2 * mu0 * c * Isat) / Omega
if units == 'ad-hoc':
e0 = hbar / (e * a0) # This is the electric field scale.
E0_sat = E0_sat / e0 # depends on [control=['if'], data=[]]
return E0_sat * sqrt(s0)
|
def CopyToDatetime(cls, timestamp, timezone, raise_error=False):
"""Copies the timestamp to a datetime object.
Args:
timestamp: The timestamp which is an integer containing the number
of micro seconds since January 1, 1970, 00:00:00 UTC.
timezone: The timezone (pytz.timezone) object.
raise_error: Boolean that if set to True will not absorb an OverflowError
if the timestamp is out of bounds. By default there will be
no error raised.
Returns:
A datetime object (instance of datetime.datetime). A datetime object of
      January 1, 1970 00:00:00 UTC is returned on error if raise_error is
not set.
Raises:
      OverflowError: If raise_error is set to True and an overflow error
occurs.
      ValueError: If raise_error is set to True and no timestamp value is
provided.
"""
datetime_object = datetime.datetime(1970, 1, 1, 0, 0, 0, 0, tzinfo=pytz.UTC)
if not timestamp:
if raise_error:
raise ValueError('Missing timestamp value')
return datetime_object
try:
datetime_object += datetime.timedelta(microseconds=timestamp)
return datetime_object.astimezone(timezone)
except OverflowError as exception:
if raise_error:
raise
logging.error((
'Unable to copy {0:d} to a datetime object with error: '
'{1!s}').format(timestamp, exception))
return datetime_object
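# Hedged usage sketch: CopyToDatetime above is written as a classmethod of some
# timestamp helper class; the wrapper name "Timestamp" below is an assumption
# made only so the call can be shown end to end.
import datetime, logging, pytz

class Timestamp(object):
    CopyToDatetime = classmethod(CopyToDatetime)

print(Timestamp.CopyToDatetime(1456000000000000, pytz.timezone('Europe/Berlin')))
# 2016-02-20 21:26:40+01:00
print(Timestamp.CopyToDatetime(2**63, pytz.UTC))  # overflow -> epoch fallback, error logged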
|
def function[CopyToDatetime, parameter[cls, timestamp, timezone, raise_error]]:
constant[Copies the timestamp to a datetime object.
Args:
timestamp: The timestamp which is an integer containing the number
of micro seconds since January 1, 1970, 00:00:00 UTC.
timezone: The timezone (pytz.timezone) object.
raise_error: Boolean that if set to True will not absorb an OverflowError
if the timestamp is out of bounds. By default there will be
no error raised.
Returns:
A datetime object (instance of datetime.datetime). A datetime object of
January 1, 1970 00:00:00 UTC is returned on error if raises_error is
not set.
Raises:
OverflowError: If raises_error is set to True and an overflow error
occurs.
ValueError: If raises_error is set to True and no timestamp value is
provided.
]
variable[datetime_object] assign[=] call[name[datetime].datetime, parameter[constant[1970], constant[1], constant[1], constant[0], constant[0], constant[0], constant[0]]]
if <ast.UnaryOp object at 0x7da18dc994e0> begin[:]
if name[raise_error] begin[:]
<ast.Raise object at 0x7da18dc98340>
return[name[datetime_object]]
<ast.Try object at 0x7da18dc9ace0>
return[name[datetime_object]]
|
keyword[def] identifier[CopyToDatetime] ( identifier[cls] , identifier[timestamp] , identifier[timezone] , identifier[raise_error] = keyword[False] ):
literal[string]
identifier[datetime_object] = identifier[datetime] . identifier[datetime] ( literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , identifier[tzinfo] = identifier[pytz] . identifier[UTC] )
keyword[if] keyword[not] identifier[timestamp] :
keyword[if] identifier[raise_error] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[datetime_object]
keyword[try] :
identifier[datetime_object] += identifier[datetime] . identifier[timedelta] ( identifier[microseconds] = identifier[timestamp] )
keyword[return] identifier[datetime_object] . identifier[astimezone] ( identifier[timezone] )
keyword[except] identifier[OverflowError] keyword[as] identifier[exception] :
keyword[if] identifier[raise_error] :
keyword[raise]
identifier[logging] . identifier[error] ((
literal[string]
literal[string] ). identifier[format] ( identifier[timestamp] , identifier[exception] ))
keyword[return] identifier[datetime_object]
|
def CopyToDatetime(cls, timestamp, timezone, raise_error=False):
"""Copies the timestamp to a datetime object.
Args:
timestamp: The timestamp which is an integer containing the number
of micro seconds since January 1, 1970, 00:00:00 UTC.
timezone: The timezone (pytz.timezone) object.
raise_error: Boolean that if set to True will not absorb an OverflowError
if the timestamp is out of bounds. By default there will be
no error raised.
Returns:
A datetime object (instance of datetime.datetime). A datetime object of
      January 1, 1970 00:00:00 UTC is returned on error if raise_error is
not set.
Raises:
      OverflowError: If raise_error is set to True and an overflow error
occurs.
      ValueError: If raise_error is set to True and no timestamp value is
provided.
"""
datetime_object = datetime.datetime(1970, 1, 1, 0, 0, 0, 0, tzinfo=pytz.UTC)
if not timestamp:
if raise_error:
raise ValueError('Missing timestamp value') # depends on [control=['if'], data=[]]
return datetime_object # depends on [control=['if'], data=[]]
try:
datetime_object += datetime.timedelta(microseconds=timestamp)
return datetime_object.astimezone(timezone) # depends on [control=['try'], data=[]]
except OverflowError as exception:
if raise_error:
raise # depends on [control=['if'], data=[]]
logging.error('Unable to copy {0:d} to a datetime object with error: {1!s}'.format(timestamp, exception)) # depends on [control=['except'], data=['exception']]
return datetime_object
|
def _print_formatted_docstring(self, docstring, f):
"""Formats the given `docstring` as Markdown and prints it to `f`."""
lines = self._remove_docstring_indent(docstring)
# Output the lines, identifying "Args" and other section blocks.
i = 0
def _at_start_of_section():
"""Returns the header if lines[i] is at start of a docstring section."""
l = lines[i]
match = _section_re.match(l)
if match and i + 1 < len(
lines) and lines[i + 1].startswith(" "):
return match.group(1)
else:
return None
while i < len(lines):
l = lines[i]
section_header = _at_start_of_section()
if section_header:
if i == 0 or lines[i-1]:
print("", file=f)
# Use at least H4 to keep these out of the TOC.
print("##### " + section_header + ":", file=f)
print("", file=f)
i += 1
outputting_list = False
while i < len(lines):
l = lines[i]
# A new section header terminates the section.
if _at_start_of_section():
break
match = _arg_re.match(l)
if match:
if not outputting_list:
# We need to start a list. In Markdown, a blank line needs to
# precede a list.
print("", file=f)
outputting_list = True
suffix = l[len(match.group()):].lstrip()
print("* <b>`" + match.group(1) + "`</b>: " + suffix, file=f)
else:
# For lines that don't start with _arg_re, continue the list if it
# has enough indentation.
outputting_list &= l.startswith(" ")
print(l, file=f)
i += 1
else:
print(l, file=f)
i += 1
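# Hedged sketch (not from the original module): _section_re and _arg_re are
# module-level patterns defined elsewhere; the stand-in below is an assumption
# used only to show how a single "  name: description" line becomes the
# Markdown bullet emitted above.
import re

_arg_re_demo = re.compile(r'\s*(\w+):')
for l in ['  a: first operand.', '  b: second operand.']:
    match = _arg_re_demo.match(l)
    suffix = l[len(match.group()):].lstrip()
    print('* <b>`' + match.group(1) + '`</b>: ' + suffix)
# * <b>`a`</b>: first operand.
# * <b>`b`</b>: second operand.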
|
def function[_print_formatted_docstring, parameter[self, docstring, f]]:
constant[Formats the given `docstring` as Markdown and prints it to `f`.]
variable[lines] assign[=] call[name[self]._remove_docstring_indent, parameter[name[docstring]]]
variable[i] assign[=] constant[0]
def function[_at_start_of_section, parameter[]]:
constant[Returns the header if lines[i] is at start of a docstring section.]
variable[l] assign[=] call[name[lines]][name[i]]
variable[match] assign[=] call[name[_section_re].match, parameter[name[l]]]
if <ast.BoolOp object at 0x7da18f00e9b0> begin[:]
return[call[name[match].group, parameter[constant[1]]]]
while compare[name[i] less[<] call[name[len], parameter[name[lines]]]] begin[:]
variable[l] assign[=] call[name[lines]][name[i]]
variable[section_header] assign[=] call[name[_at_start_of_section], parameter[]]
if name[section_header] begin[:]
if <ast.BoolOp object at 0x7da2044c2290> begin[:]
call[name[print], parameter[constant[]]]
call[name[print], parameter[binary_operation[binary_operation[constant[##### ] + name[section_header]] + constant[:]]]]
call[name[print], parameter[constant[]]]
<ast.AugAssign object at 0x7da2044c3310>
variable[outputting_list] assign[=] constant[False]
while compare[name[i] less[<] call[name[len], parameter[name[lines]]]] begin[:]
variable[l] assign[=] call[name[lines]][name[i]]
if call[name[_at_start_of_section], parameter[]] begin[:]
break
variable[match] assign[=] call[name[_arg_re].match, parameter[name[l]]]
if name[match] begin[:]
if <ast.UnaryOp object at 0x7da18f00da80> begin[:]
call[name[print], parameter[constant[]]]
variable[outputting_list] assign[=] constant[True]
variable[suffix] assign[=] call[call[name[l]][<ast.Slice object at 0x7da18f00f8b0>].lstrip, parameter[]]
call[name[print], parameter[binary_operation[binary_operation[binary_operation[constant[* <b>`] + call[name[match].group, parameter[constant[1]]]] + constant[`</b>: ]] + name[suffix]]]]
<ast.AugAssign object at 0x7da18f00d990>
|
keyword[def] identifier[_print_formatted_docstring] ( identifier[self] , identifier[docstring] , identifier[f] ):
literal[string]
identifier[lines] = identifier[self] . identifier[_remove_docstring_indent] ( identifier[docstring] )
identifier[i] = literal[int]
keyword[def] identifier[_at_start_of_section] ():
literal[string]
identifier[l] = identifier[lines] [ identifier[i] ]
identifier[match] = identifier[_section_re] . identifier[match] ( identifier[l] )
keyword[if] identifier[match] keyword[and] identifier[i] + literal[int] < identifier[len] (
identifier[lines] ) keyword[and] identifier[lines] [ identifier[i] + literal[int] ]. identifier[startswith] ( literal[string] ):
keyword[return] identifier[match] . identifier[group] ( literal[int] )
keyword[else] :
keyword[return] keyword[None]
keyword[while] identifier[i] < identifier[len] ( identifier[lines] ):
identifier[l] = identifier[lines] [ identifier[i] ]
identifier[section_header] = identifier[_at_start_of_section] ()
keyword[if] identifier[section_header] :
keyword[if] identifier[i] == literal[int] keyword[or] identifier[lines] [ identifier[i] - literal[int] ]:
identifier[print] ( literal[string] , identifier[file] = identifier[f] )
identifier[print] ( literal[string] + identifier[section_header] + literal[string] , identifier[file] = identifier[f] )
identifier[print] ( literal[string] , identifier[file] = identifier[f] )
identifier[i] += literal[int]
identifier[outputting_list] = keyword[False]
keyword[while] identifier[i] < identifier[len] ( identifier[lines] ):
identifier[l] = identifier[lines] [ identifier[i] ]
keyword[if] identifier[_at_start_of_section] ():
keyword[break]
identifier[match] = identifier[_arg_re] . identifier[match] ( identifier[l] )
keyword[if] identifier[match] :
keyword[if] keyword[not] identifier[outputting_list] :
identifier[print] ( literal[string] , identifier[file] = identifier[f] )
identifier[outputting_list] = keyword[True]
identifier[suffix] = identifier[l] [ identifier[len] ( identifier[match] . identifier[group] ()):]. identifier[lstrip] ()
identifier[print] ( literal[string] + identifier[match] . identifier[group] ( literal[int] )+ literal[string] + identifier[suffix] , identifier[file] = identifier[f] )
keyword[else] :
identifier[outputting_list] &= identifier[l] . identifier[startswith] ( literal[string] )
identifier[print] ( identifier[l] , identifier[file] = identifier[f] )
identifier[i] += literal[int]
keyword[else] :
identifier[print] ( identifier[l] , identifier[file] = identifier[f] )
identifier[i] += literal[int]
|
def _print_formatted_docstring(self, docstring, f):
"""Formats the given `docstring` as Markdown and prints it to `f`."""
lines = self._remove_docstring_indent(docstring)
# Output the lines, identifying "Args" and other section blocks.
i = 0
def _at_start_of_section():
"""Returns the header if lines[i] is at start of a docstring section."""
l = lines[i]
match = _section_re.match(l)
if match and i + 1 < len(lines) and lines[i + 1].startswith(' '):
return match.group(1) # depends on [control=['if'], data=[]]
else:
return None
while i < len(lines):
l = lines[i]
section_header = _at_start_of_section()
if section_header:
if i == 0 or lines[i - 1]:
print('', file=f) # depends on [control=['if'], data=[]]
# Use at least H4 to keep these out of the TOC.
print('##### ' + section_header + ':', file=f)
print('', file=f)
i += 1
outputting_list = False
while i < len(lines):
l = lines[i]
# A new section header terminates the section.
if _at_start_of_section():
break # depends on [control=['if'], data=[]]
match = _arg_re.match(l)
if match:
if not outputting_list:
# We need to start a list. In Markdown, a blank line needs to
# precede a list.
print('', file=f)
outputting_list = True # depends on [control=['if'], data=[]]
suffix = l[len(match.group()):].lstrip()
print('* <b>`' + match.group(1) + '`</b>: ' + suffix, file=f) # depends on [control=['if'], data=[]]
else:
# For lines that don't start with _arg_re, continue the list if it
# has enough indentation.
outputting_list &= l.startswith(' ')
print(l, file=f)
i += 1 # depends on [control=['while'], data=['i']] # depends on [control=['if'], data=[]]
else:
print(l, file=f)
i += 1 # depends on [control=['while'], data=['i']]
|
def upload_files(self, source_paths, dir_name=None):
        '''Create upload tasks in bulk; sub-directories are scanned and uploaded in turn.
        source_path - absolute path of the local file
        dir_name - parent directory of the file on the server; if None, a
            dialog is shown so the user can choose a directory.
'''
def scan_folders(folder_path):
file_list = os.listdir(folder_path)
source_paths = [os.path.join(folder_path, f) for f in file_list]
self.upload_files(source_paths,
os.path.join(dir_name, os.path.split(folder_path)[1]))
self.check_first()
if not dir_name:
folder_dialog = FolderBrowserDialog(self, self.app)
response = folder_dialog.run()
if response != Gtk.ResponseType.OK:
folder_dialog.destroy()
return
dir_name = folder_dialog.get_path()
folder_dialog.destroy()
invalid_paths = []
for source_path in source_paths:
if util.validate_pathname(source_path) != ValidatePathState.OK:
invalid_paths.append(source_path)
continue
if (os.path.split(source_path)[1].startswith('.') and
not self.app.profile['upload-hidden-files']):
continue
if os.path.isfile(source_path):
self.upload_file(source_path, dir_name)
elif os.path.isdir(source_path):
scan_folders(source_path)
self.app.blink_page(self)
self.scan_tasks()
if not invalid_paths:
return
dialog = Gtk.Dialog(_('Invalid Filepath'), self.app.window,
Gtk.DialogFlags.MODAL,
(Gtk.STOCK_CLOSE, Gtk.ResponseType.OK))
dialog.set_default_size(640, 480)
dialog.set_border_width(10)
box = dialog.get_content_area()
scrolled_window = Gtk.ScrolledWindow()
box.pack_start(scrolled_window, True, True, 0)
text_buffer = Gtk.TextBuffer()
textview = Gtk.TextView.new_with_buffer(text_buffer)
scrolled_window.add(textview)
for invalid_path in invalid_paths:
text_buffer.insert_at_cursor(invalid_path)
text_buffer.insert_at_cursor('\n')
infobar = Gtk.InfoBar()
infobar.set_message_type(Gtk.MessageType.ERROR)
box.pack_end(infobar, False, False, 0)
        info_label = Gtk.Label()
infobar.get_content_area().pack_start(info_label, False, False, 0)
info_label.set_label(''.join([
'* ', ValidatePathStateText[1], '\n',
'* ', ValidatePathStateText[2], '\n',
'* ', ValidatePathStateText[3], '\n',
]))
box.show_all()
dialog.run()
dialog.destroy()
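# Hedged sketch (not from the original class): the directory branch above just
# recurses manually over os.listdir; the standalone helper below shows the same
# traversal with the hidden-file filter, without any GTK or task bookkeeping.
import os

def collect_uploads(paths, upload_hidden=False):
    picked = []
    for p in paths:
        if os.path.basename(p).startswith('.') and not upload_hidden:
            continue
        if os.path.isfile(p):
            picked.append(p)
        elif os.path.isdir(p):
            children = [os.path.join(p, f) for f in os.listdir(p)]
            picked.extend(collect_uploads(children, upload_hidden))
    return picked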
|
def function[upload_files, parameter[self, source_paths, dir_name]]:
    constant[Create upload tasks in bulk; sub-directories are scanned and uploaded in turn.
    source_path - absolute path of the local file
    dir_name - parent directory of the file on the server; if None, a
        dialog is shown so the user can choose a directory.
]
def function[scan_folders, parameter[folder_path]]:
variable[file_list] assign[=] call[name[os].listdir, parameter[name[folder_path]]]
variable[source_paths] assign[=] <ast.ListComp object at 0x7da1b1d6e470>
call[name[self].upload_files, parameter[name[source_paths], call[name[os].path.join, parameter[name[dir_name], call[call[name[os].path.split, parameter[name[folder_path]]]][constant[1]]]]]]
call[name[self].check_first, parameter[]]
if <ast.UnaryOp object at 0x7da1b1d6de10> begin[:]
variable[folder_dialog] assign[=] call[name[FolderBrowserDialog], parameter[name[self], name[self].app]]
variable[response] assign[=] call[name[folder_dialog].run, parameter[]]
if compare[name[response] not_equal[!=] name[Gtk].ResponseType.OK] begin[:]
call[name[folder_dialog].destroy, parameter[]]
return[None]
variable[dir_name] assign[=] call[name[folder_dialog].get_path, parameter[]]
call[name[folder_dialog].destroy, parameter[]]
variable[invalid_paths] assign[=] list[[]]
for taget[name[source_path]] in starred[name[source_paths]] begin[:]
if compare[call[name[util].validate_pathname, parameter[name[source_path]]] not_equal[!=] name[ValidatePathState].OK] begin[:]
call[name[invalid_paths].append, parameter[name[source_path]]]
continue
if <ast.BoolOp object at 0x7da1b1d6d270> begin[:]
continue
if call[name[os].path.isfile, parameter[name[source_path]]] begin[:]
call[name[self].upload_file, parameter[name[source_path], name[dir_name]]]
call[name[self].app.blink_page, parameter[name[self]]]
call[name[self].scan_tasks, parameter[]]
if <ast.UnaryOp object at 0x7da1b1d6c9a0> begin[:]
return[None]
variable[dialog] assign[=] call[name[Gtk].Dialog, parameter[call[name[_], parameter[constant[Invalid Filepath]]], name[self].app.window, name[Gtk].DialogFlags.MODAL, tuple[[<ast.Attribute object at 0x7da1b1d6c5b0>, <ast.Attribute object at 0x7da1b1d6f5e0>]]]]
call[name[dialog].set_default_size, parameter[constant[640], constant[480]]]
call[name[dialog].set_border_width, parameter[constant[10]]]
variable[box] assign[=] call[name[dialog].get_content_area, parameter[]]
variable[scrolled_window] assign[=] call[name[Gtk].ScrolledWindow, parameter[]]
call[name[box].pack_start, parameter[name[scrolled_window], constant[True], constant[True], constant[0]]]
variable[text_buffer] assign[=] call[name[Gtk].TextBuffer, parameter[]]
variable[textview] assign[=] call[name[Gtk].TextView.new_with_buffer, parameter[name[text_buffer]]]
call[name[scrolled_window].add, parameter[name[textview]]]
for taget[name[invalid_path]] in starred[name[invalid_paths]] begin[:]
call[name[text_buffer].insert_at_cursor, parameter[name[invalid_path]]]
call[name[text_buffer].insert_at_cursor, parameter[constant[
]]]
variable[infobar] assign[=] call[name[Gtk].InfoBar, parameter[]]
call[name[infobar].set_message_type, parameter[name[Gtk].MessageType.ERROR]]
call[name[box].pack_end, parameter[name[infobar], constant[False], constant[False], constant[0]]]
variable[info_label] assign[=] call[name[Gtk].Label, parameter[]]
call[call[name[infobar].get_content_area, parameter[]].pack_start, parameter[name[info_label], constant[False], constant[False], constant[0]]]
call[name[info_label].set_label, parameter[call[constant[].join, parameter[list[[<ast.Constant object at 0x7da1b1d6f310>, <ast.Subscript object at 0x7da1b1d6fcd0>, <ast.Constant object at 0x7da1b1d6f2e0>, <ast.Constant object at 0x7da1b1d6fdc0>, <ast.Subscript object at 0x7da1b1d6fd90>, <ast.Constant object at 0x7da1b1df9e70>, <ast.Constant object at 0x7da1b1df8c40>, <ast.Subscript object at 0x7da1b1df8a90>, <ast.Constant object at 0x7da1b1dfb550>]]]]]]
call[name[box].show_all, parameter[]]
call[name[dialog].run, parameter[]]
call[name[dialog].destroy, parameter[]]
|
keyword[def] identifier[upload_files] ( identifier[self] , identifier[source_paths] , identifier[dir_name] = keyword[None] ):
literal[string]
keyword[def] identifier[scan_folders] ( identifier[folder_path] ):
identifier[file_list] = identifier[os] . identifier[listdir] ( identifier[folder_path] )
identifier[source_paths] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[folder_path] , identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[file_list] ]
identifier[self] . identifier[upload_files] ( identifier[source_paths] ,
identifier[os] . identifier[path] . identifier[join] ( identifier[dir_name] , identifier[os] . identifier[path] . identifier[split] ( identifier[folder_path] )[ literal[int] ]))
identifier[self] . identifier[check_first] ()
keyword[if] keyword[not] identifier[dir_name] :
identifier[folder_dialog] = identifier[FolderBrowserDialog] ( identifier[self] , identifier[self] . identifier[app] )
identifier[response] = identifier[folder_dialog] . identifier[run] ()
keyword[if] identifier[response] != identifier[Gtk] . identifier[ResponseType] . identifier[OK] :
identifier[folder_dialog] . identifier[destroy] ()
keyword[return]
identifier[dir_name] = identifier[folder_dialog] . identifier[get_path] ()
identifier[folder_dialog] . identifier[destroy] ()
identifier[invalid_paths] =[]
keyword[for] identifier[source_path] keyword[in] identifier[source_paths] :
keyword[if] identifier[util] . identifier[validate_pathname] ( identifier[source_path] )!= identifier[ValidatePathState] . identifier[OK] :
identifier[invalid_paths] . identifier[append] ( identifier[source_path] )
keyword[continue]
keyword[if] ( identifier[os] . identifier[path] . identifier[split] ( identifier[source_path] )[ literal[int] ]. identifier[startswith] ( literal[string] ) keyword[and]
keyword[not] identifier[self] . identifier[app] . identifier[profile] [ literal[string] ]):
keyword[continue]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[source_path] ):
identifier[self] . identifier[upload_file] ( identifier[source_path] , identifier[dir_name] )
keyword[elif] identifier[os] . identifier[path] . identifier[isdir] ( identifier[source_path] ):
identifier[scan_folders] ( identifier[source_path] )
identifier[self] . identifier[app] . identifier[blink_page] ( identifier[self] )
identifier[self] . identifier[scan_tasks] ()
keyword[if] keyword[not] identifier[invalid_paths] :
keyword[return]
identifier[dialog] = identifier[Gtk] . identifier[Dialog] ( identifier[_] ( literal[string] ), identifier[self] . identifier[app] . identifier[window] ,
identifier[Gtk] . identifier[DialogFlags] . identifier[MODAL] ,
( identifier[Gtk] . identifier[STOCK_CLOSE] , identifier[Gtk] . identifier[ResponseType] . identifier[OK] ))
identifier[dialog] . identifier[set_default_size] ( literal[int] , literal[int] )
identifier[dialog] . identifier[set_border_width] ( literal[int] )
identifier[box] = identifier[dialog] . identifier[get_content_area] ()
identifier[scrolled_window] = identifier[Gtk] . identifier[ScrolledWindow] ()
identifier[box] . identifier[pack_start] ( identifier[scrolled_window] , keyword[True] , keyword[True] , literal[int] )
identifier[text_buffer] = identifier[Gtk] . identifier[TextBuffer] ()
identifier[textview] = identifier[Gtk] . identifier[TextView] . identifier[new_with_buffer] ( identifier[text_buffer] )
identifier[scrolled_window] . identifier[add] ( identifier[textview] )
keyword[for] identifier[invalid_path] keyword[in] identifier[invalid_paths] :
identifier[text_buffer] . identifier[insert_at_cursor] ( identifier[invalid_path] )
identifier[text_buffer] . identifier[insert_at_cursor] ( literal[string] )
identifier[infobar] = identifier[Gtk] . identifier[InfoBar] ()
identifier[infobar] . identifier[set_message_type] ( identifier[Gtk] . identifier[MessageType] . identifier[ERROR] )
identifier[box] . identifier[pack_end] ( identifier[infobar] , keyword[False] , keyword[False] , literal[int] )
identifier[info_label] = identifier[Gtk] . identifier[Label] ()
identifier[infobar] . identifier[get_content_area] (). identifier[pack_start] ( identifier[info_label] , keyword[False] , keyword[False] , literal[int] )
identifier[info_label] . identifier[set_label] ( literal[string] . identifier[join] ([
literal[string] , identifier[ValidatePathStateText] [ literal[int] ], literal[string] ,
literal[string] , identifier[ValidatePathStateText] [ literal[int] ], literal[string] ,
literal[string] , identifier[ValidatePathStateText] [ literal[int] ], literal[string] ,
]))
identifier[box] . identifier[show_all] ()
identifier[dialog] . identifier[run] ()
identifier[dialog] . identifier[destroy] ()
|
def upload_files(self, source_paths, dir_name=None):
        """Create upload tasks in bulk; sub-directories are scanned and uploaded in turn.
        source_path - absolute path of the local file
        dir_name - parent directory of the file on the server; if None, a
            dialog is shown so the user can choose a directory.
"""
def scan_folders(folder_path):
file_list = os.listdir(folder_path)
source_paths = [os.path.join(folder_path, f) for f in file_list]
self.upload_files(source_paths, os.path.join(dir_name, os.path.split(folder_path)[1]))
self.check_first()
if not dir_name:
folder_dialog = FolderBrowserDialog(self, self.app)
response = folder_dialog.run()
if response != Gtk.ResponseType.OK:
folder_dialog.destroy()
return # depends on [control=['if'], data=[]]
dir_name = folder_dialog.get_path()
folder_dialog.destroy() # depends on [control=['if'], data=[]]
invalid_paths = []
for source_path in source_paths:
if util.validate_pathname(source_path) != ValidatePathState.OK:
invalid_paths.append(source_path)
continue # depends on [control=['if'], data=[]]
if os.path.split(source_path)[1].startswith('.') and (not self.app.profile['upload-hidden-files']):
continue # depends on [control=['if'], data=[]]
if os.path.isfile(source_path):
self.upload_file(source_path, dir_name) # depends on [control=['if'], data=[]]
elif os.path.isdir(source_path):
scan_folders(source_path) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['source_path']]
self.app.blink_page(self)
self.scan_tasks()
if not invalid_paths:
return # depends on [control=['if'], data=[]]
dialog = Gtk.Dialog(_('Invalid Filepath'), self.app.window, Gtk.DialogFlags.MODAL, (Gtk.STOCK_CLOSE, Gtk.ResponseType.OK))
dialog.set_default_size(640, 480)
dialog.set_border_width(10)
box = dialog.get_content_area()
scrolled_window = Gtk.ScrolledWindow()
box.pack_start(scrolled_window, True, True, 0)
text_buffer = Gtk.TextBuffer()
textview = Gtk.TextView.new_with_buffer(text_buffer)
scrolled_window.add(textview)
for invalid_path in invalid_paths:
text_buffer.insert_at_cursor(invalid_path)
text_buffer.insert_at_cursor('\n') # depends on [control=['for'], data=['invalid_path']]
infobar = Gtk.InfoBar()
infobar.set_message_type(Gtk.MessageType.ERROR)
box.pack_end(infobar, False, False, 0)
info_label = Gtk.Label()
infobar.get_content_area().pack_start(info_label, False, False, 0)
info_label.set_label(''.join(['* ', ValidatePathStateText[1], '\n', '* ', ValidatePathStateText[2], '\n', '* ', ValidatePathStateText[3], '\n']))
box.show_all()
dialog.run()
dialog.destroy()
|
def add_mag_drifts(inst):
"""Adds ion drifts in magnetic coordinates using ion drifts in S/C coordinates
along with pre-calculated unit vectors for magnetic coordinates.
Note
----
Requires ion drifts under labels 'iv_*' where * = (x,y,z) along with
    unit vector labels 'unit_zon_*', 'unit_fa_*', and 'unit_mer_*',
where the unit vectors are expressed in S/C coordinates. These
vectors are calculated by add_mag_drift_unit_vectors.
Parameters
----------
inst : pysat.Instrument
Instrument object will be modified to include new ion drift magnitudes
Returns
-------
None
Instrument object modified in place
"""
inst['iv_zon'] = {'data':inst['unit_zon_x'] * inst['iv_x'] + inst['unit_zon_y']*inst['iv_y'] + inst['unit_zon_z']*inst['iv_z'],
'units':'m/s',
'long_name':'Zonal ion velocity',
'notes':('Ion velocity relative to co-rotation along zonal '
'direction, normal to meridional plane. Positive east. '
'Velocity obtained using ion velocities relative '
'to co-rotation in the instrument frame along '
'with the corresponding unit vectors expressed in '
'the instrument frame. '),
'label': 'Zonal Ion Velocity',
'axis': 'Zonal Ion Velocity',
'desc': 'Zonal ion velocity',
'scale': 'Linear',
'value_min':-500.,
'value_max':500.}
inst['iv_fa'] = {'data':inst['unit_fa_x'] * inst['iv_x'] + inst['unit_fa_y'] * inst['iv_y'] + inst['unit_fa_z'] * inst['iv_z'],
'units':'m/s',
'long_name':'Field-Aligned ion velocity',
                     'notes':('Ion velocity relative to co-rotation along magnetic field line. Positive along the field. '
'Velocity obtained using ion velocities relative '
'to co-rotation in the instrument frame along '
'with the corresponding unit vectors expressed in '
'the instrument frame. '),
'label':'Field-Aligned Ion Velocity',
'axis':'Field-Aligned Ion Velocity',
'desc':'Field-Aligned Ion Velocity',
'scale':'Linear',
'value_min':-500.,
'value_max':500.}
inst['iv_mer'] = {'data':inst['unit_mer_x'] * inst['iv_x'] + inst['unit_mer_y']*inst['iv_y'] + inst['unit_mer_z']*inst['iv_z'],
'units':'m/s',
'long_name':'Meridional ion velocity',
'notes':('Velocity along meridional direction, perpendicular '
                      'to field and within meridional plane. Positive is up at magnetic equator. '
'Velocity obtained using ion velocities relative '
'to co-rotation in the instrument frame along '
'with the corresponding unit vectors expressed in '
'the instrument frame. '),
'label':'Meridional Ion Velocity',
'axis':'Meridional Ion Velocity',
'desc':'Meridional Ion Velocity',
'scale':'Linear',
'value_min':-500.,
'value_max':500.}
return
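# Hedged sketch (not in the original source): each assignment above is just the
# projection of the S/C-frame drift onto a unit vector. With numpy the same
# thing is a dot product; the numbers below are made up for illustration.
import numpy as np

iv_xyz = np.array([50.0, -20.0, 5.0])      # ion drift in instrument coordinates, m/s
unit_zon = np.array([0.98, 0.17, 0.05])    # zonal unit vector in the same frame
iv_zon = np.dot(unit_zon, iv_xyz)          # unit_zon_x*iv_x + unit_zon_y*iv_y + unit_zon_z*iv_z
print(iv_zon)                              # 45.85 m/s, positive east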
|
def function[add_mag_drifts, parameter[inst]]:
constant[Adds ion drifts in magnetic coordinates using ion drifts in S/C coordinates
along with pre-calculated unit vectors for magnetic coordinates.
Note
----
Requires ion drifts under labels 'iv_*' where * = (x,y,z) along with
unit vectors labels 'unit_zonal_*', 'unit_fa_*', and 'unit_mer_*',
where the unit vectors are expressed in S/C coordinates. These
vectors are calculated by add_mag_drift_unit_vectors.
Parameters
----------
inst : pysat.Instrument
Instrument object will be modified to include new ion drift magnitudes
Returns
-------
None
Instrument object modified in place
]
call[name[inst]][constant[iv_zon]] assign[=] dictionary[[<ast.Constant object at 0x7da1b0bd8640>, <ast.Constant object at 0x7da1b0bd9900>, <ast.Constant object at 0x7da1b0bd9ff0>, <ast.Constant object at 0x7da1b0bdad10>, <ast.Constant object at 0x7da1b0bd9d50>, <ast.Constant object at 0x7da1b0bdaad0>, <ast.Constant object at 0x7da1b0bdb5b0>, <ast.Constant object at 0x7da1b0bdb370>, <ast.Constant object at 0x7da1b0bd8550>, <ast.Constant object at 0x7da1b0bdbc40>], [<ast.BinOp object at 0x7da1b0bdbd00>, <ast.Constant object at 0x7da1b0bd9990>, <ast.Constant object at 0x7da1b0bd91b0>, <ast.Constant object at 0x7da1b0bdbe50>, <ast.Constant object at 0x7da1b0bd96f0>, <ast.Constant object at 0x7da1b0bd98d0>, <ast.Constant object at 0x7da1b0bd9b40>, <ast.Constant object at 0x7da1b0bdab30>, <ast.UnaryOp object at 0x7da1b0bd92d0>, <ast.Constant object at 0x7da1b0bd9ae0>]]
call[name[inst]][constant[iv_fa]] assign[=] dictionary[[<ast.Constant object at 0x7da1b0bda800>, <ast.Constant object at 0x7da1b0bdaa70>, <ast.Constant object at 0x7da1b0bd8190>, <ast.Constant object at 0x7da1b0bdb9a0>, <ast.Constant object at 0x7da1b0bd9ab0>, <ast.Constant object at 0x7da1b0bd94e0>, <ast.Constant object at 0x7da1b0bd8730>, <ast.Constant object at 0x7da1b0bda320>, <ast.Constant object at 0x7da1b0bd8c10>, <ast.Constant object at 0x7da1b0bdb640>], [<ast.BinOp object at 0x7da1b0bd9660>, <ast.Constant object at 0x7da1b0bd8070>, <ast.Constant object at 0x7da1b0bdb220>, <ast.Tuple object at 0x7da1b0bda170>, <ast.Constant object at 0x7da1b0bda470>, <ast.Constant object at 0x7da1b0bdb160>, <ast.Constant object at 0x7da1b0bd8e20>, <ast.Constant object at 0x7da1b0bdae60>, <ast.UnaryOp object at 0x7da1b0bd9c30>, <ast.Constant object at 0x7da1b0bd8b20>]]
call[name[inst]][constant[iv_mer]] assign[=] dictionary[[<ast.Constant object at 0x7da1b0bdb040>, <ast.Constant object at 0x7da1b0bdace0>, <ast.Constant object at 0x7da1b0bd8880>, <ast.Constant object at 0x7da1b0bdba00>, <ast.Constant object at 0x7da1b0bd8be0>, <ast.Constant object at 0x7da1b0bdbf10>, <ast.Constant object at 0x7da1b0bd9030>, <ast.Constant object at 0x7da1b0bd9330>, <ast.Constant object at 0x7da1b0bdb760>, <ast.Constant object at 0x7da1b0bdabc0>], [<ast.BinOp object at 0x7da1b0bd9c00>, <ast.Constant object at 0x7da1b0bd83a0>, <ast.Constant object at 0x7da1b0bd8670>, <ast.Tuple object at 0x7da1b0b72aa0>, <ast.Constant object at 0x7da1b0b70250>, <ast.Constant object at 0x7da1b0b70ca0>, <ast.Constant object at 0x7da1b0b70460>, <ast.Constant object at 0x7da1b0b73a00>, <ast.UnaryOp object at 0x7da1b0b73520>, <ast.Constant object at 0x7da1b0b73a60>]]
return[None]
|
keyword[def] identifier[add_mag_drifts] ( identifier[inst] ):
literal[string]
identifier[inst] [ literal[string] ]={ literal[string] : identifier[inst] [ literal[string] ]* identifier[inst] [ literal[string] ]+ identifier[inst] [ literal[string] ]* identifier[inst] [ literal[string] ]+ identifier[inst] [ literal[string] ]* identifier[inst] [ literal[string] ],
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] :( literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] ),
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] :- literal[int] ,
literal[string] : literal[int] }
identifier[inst] [ literal[string] ]={ literal[string] : identifier[inst] [ literal[string] ]* identifier[inst] [ literal[string] ]+ identifier[inst] [ literal[string] ]* identifier[inst] [ literal[string] ]+ identifier[inst] [ literal[string] ]* identifier[inst] [ literal[string] ],
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] :( literal[string] ,
literal[string]
literal[string]
literal[string]
literal[string] ),
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] :- literal[int] ,
literal[string] : literal[int] }
identifier[inst] [ literal[string] ]={ literal[string] : identifier[inst] [ literal[string] ]* identifier[inst] [ literal[string] ]+ identifier[inst] [ literal[string] ]* identifier[inst] [ literal[string] ]+ identifier[inst] [ literal[string] ]* identifier[inst] [ literal[string] ],
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] :( literal[string]
literal[string] ,
literal[string]
literal[string]
literal[string]
literal[string] ),
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] :- literal[int] ,
literal[string] : literal[int] }
keyword[return]
|
def add_mag_drifts(inst):
"""Adds ion drifts in magnetic coordinates using ion drifts in S/C coordinates
along with pre-calculated unit vectors for magnetic coordinates.
Note
----
Requires ion drifts under labels 'iv_*' where * = (x,y,z) along with
    unit vector labels 'unit_zon_*', 'unit_fa_*', and 'unit_mer_*',
where the unit vectors are expressed in S/C coordinates. These
vectors are calculated by add_mag_drift_unit_vectors.
Parameters
----------
inst : pysat.Instrument
Instrument object will be modified to include new ion drift magnitudes
Returns
-------
None
Instrument object modified in place
"""
inst['iv_zon'] = {'data': inst['unit_zon_x'] * inst['iv_x'] + inst['unit_zon_y'] * inst['iv_y'] + inst['unit_zon_z'] * inst['iv_z'], 'units': 'm/s', 'long_name': 'Zonal ion velocity', 'notes': 'Ion velocity relative to co-rotation along zonal direction, normal to meridional plane. Positive east. Velocity obtained using ion velocities relative to co-rotation in the instrument frame along with the corresponding unit vectors expressed in the instrument frame. ', 'label': 'Zonal Ion Velocity', 'axis': 'Zonal Ion Velocity', 'desc': 'Zonal ion velocity', 'scale': 'Linear', 'value_min': -500.0, 'value_max': 500.0}
    inst['iv_fa'] = {'data': inst['unit_fa_x'] * inst['iv_x'] + inst['unit_fa_y'] * inst['iv_y'] + inst['unit_fa_z'] * inst['iv_z'], 'units': 'm/s', 'long_name': 'Field-Aligned ion velocity', 'notes': 'Ion velocity relative to co-rotation along magnetic field line. Positive along the field. Velocity obtained using ion velocities relative to co-rotation in the instrument frame along with the corresponding unit vectors expressed in the instrument frame. ', 'label': 'Field-Aligned Ion Velocity', 'axis': 'Field-Aligned Ion Velocity', 'desc': 'Field-Aligned Ion Velocity', 'scale': 'Linear', 'value_min': -500.0, 'value_max': 500.0}
    inst['iv_mer'] = {'data': inst['unit_mer_x'] * inst['iv_x'] + inst['unit_mer_y'] * inst['iv_y'] + inst['unit_mer_z'] * inst['iv_z'], 'units': 'm/s', 'long_name': 'Meridional ion velocity', 'notes': 'Velocity along meridional direction, perpendicular to field and within meridional plane. Positive is up at magnetic equator. Velocity obtained using ion velocities relative to co-rotation in the instrument frame along with the corresponding unit vectors expressed in the instrument frame. ', 'label': 'Meridional Ion Velocity', 'axis': 'Meridional Ion Velocity', 'desc': 'Meridional Ion Velocity', 'scale': 'Linear', 'value_min': -500.0, 'value_max': 500.0}
return
|
def draw_annulus(self, center, inner_radius, outer_radius, array, value, mode="set"):
"""
Draws an annulus of specified radius on the input array and fills it with specified value
:param center: a tuple for the center of the annulus
:type center: tuple (x,y)
:param inner_radius: how many pixels in radius the interior empty circle is, where the annulus begins
:type inner_radius: int
:param outer_radius: how many pixels in radius the larger outer circle is, where the annulus ends
        :type outer_radius: int
:param array: image to draw annulus on
:type array: size (m,n) numpy array
:param value: what value to fill the annulus with
:type value: float
:param mode: if "set" will assign the circle interior value, if "add" will add the value to the circle interior,
throws exception otherwise
:type mode: string, either "set" or "add"
:return: updates input array and then returns it with the annulus coordinates as a tuple
"""
if mode == "add":
self.draw_circle(center, outer_radius, array, value)
self.draw_circle(center, inner_radius, array, -value)
elif mode == "set":
ri, ci, existing = self.draw_circle(center, inner_radius, array, -value)
self.draw_circle(center, outer_radius, array, value)
array[ri, ci] = existing
else:
raise ValueError("draw_annulus mode must be 'set' or 'add' but {} used".format(mode))
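# Hedged sketch (not from the original class): draw_circle is defined elsewhere;
# the mask helper below is an assumption used only to illustrate the
# outer-circle-minus-inner-circle idea behind the "add" branch above.
import numpy as np

def _circle_mask(shape, center, radius):
    yy, xx = np.ogrid[:shape[0], :shape[1]]
    return (xx - center[0])**2 + (yy - center[1])**2 <= radius**2

img = np.zeros((9, 9))
img[_circle_mask(img.shape, (4, 4), 4)] += 1.0   # fill the outer circle
img[_circle_mask(img.shape, (4, 4), 2)] -= 1.0   # cancel the inner circle back to 0
print(img)                                       # ring of 1.0 between radii 2 and 4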
|
def function[draw_annulus, parameter[self, center, inner_radius, outer_radius, array, value, mode]]:
constant[
Draws an annulus of specified radius on the input array and fills it with specified value
:param center: a tuple for the center of the annulus
:type center: tuple (x,y)
:param inner_radius: how many pixels in radius the interior empty circle is, where the annulus begins
:type inner_radius: int
:param outer_radius: how many pixels in radius the larger outer circle is, where the annulus ends
:typde outer_radius: int
:param array: image to draw annulus on
:type array: size (m,n) numpy array
:param value: what value to fill the annulus with
:type value: float
:param mode: if "set" will assign the circle interior value, if "add" will add the value to the circle interior,
throws exception otherwise
:type mode: string, either "set" or "add"
:return: updates input array and then returns it with the annulus coordinates as a tuple
]
if compare[name[mode] equal[==] constant[add]] begin[:]
call[name[self].draw_circle, parameter[name[center], name[outer_radius], name[array], name[value]]]
call[name[self].draw_circle, parameter[name[center], name[inner_radius], name[array], <ast.UnaryOp object at 0x7da20c76e440>]]
|
keyword[def] identifier[draw_annulus] ( identifier[self] , identifier[center] , identifier[inner_radius] , identifier[outer_radius] , identifier[array] , identifier[value] , identifier[mode] = literal[string] ):
literal[string]
keyword[if] identifier[mode] == literal[string] :
identifier[self] . identifier[draw_circle] ( identifier[center] , identifier[outer_radius] , identifier[array] , identifier[value] )
identifier[self] . identifier[draw_circle] ( identifier[center] , identifier[inner_radius] , identifier[array] ,- identifier[value] )
keyword[elif] identifier[mode] == literal[string] :
identifier[ri] , identifier[ci] , identifier[existing] = identifier[self] . identifier[draw_circle] ( identifier[center] , identifier[inner_radius] , identifier[array] ,- identifier[value] )
identifier[self] . identifier[draw_circle] ( identifier[center] , identifier[outer_radius] , identifier[array] , identifier[value] )
identifier[array] [ identifier[ri] , identifier[ci] ]= identifier[existing]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[mode] ))
|
def draw_annulus(self, center, inner_radius, outer_radius, array, value, mode='set'):
"""
Draws an annulus of specified radius on the input array and fills it with specified value
:param center: a tuple for the center of the annulus
:type center: tuple (x,y)
:param inner_radius: how many pixels in radius the interior empty circle is, where the annulus begins
:type inner_radius: int
:param outer_radius: how many pixels in radius the larger outer circle is, where the annulus ends
        :type outer_radius: int
:param array: image to draw annulus on
:type array: size (m,n) numpy array
:param value: what value to fill the annulus with
:type value: float
:param mode: if "set" will assign the circle interior value, if "add" will add the value to the circle interior,
throws exception otherwise
:type mode: string, either "set" or "add"
:return: updates input array and then returns it with the annulus coordinates as a tuple
"""
if mode == 'add':
self.draw_circle(center, outer_radius, array, value)
self.draw_circle(center, inner_radius, array, -value) # depends on [control=['if'], data=[]]
elif mode == 'set':
(ri, ci, existing) = self.draw_circle(center, inner_radius, array, -value)
self.draw_circle(center, outer_radius, array, value)
array[ri, ci] = existing # depends on [control=['if'], data=[]]
else:
raise ValueError("draw_annulus mode must be 'set' or 'add' but {} used".format(mode))
|
def get_field_identifier(self):
"""
Return the lag/lead function with the offset and default value
"""
if self.default is None:
return '{0}, {1}'.format(self.field.get_select_sql(), self.offset)
return "{0}, {1}, '{2}'".format(self.field.get_select_sql(), self.offset, self.default)
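# Hedged illustration (the column name is an assumption): with offset=1 and a
# select SQL of "my_table.amount", the two branches above produce the argument
# lists that the caller wraps in LAG(...)/LEAD(...):
print('{0}, {1}'.format('my_table.amount', 1))             # my_table.amount, 1
print("{0}, {1}, '{2}'".format('my_table.amount', 1, 0))   # my_table.amount, 1, '0'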
|
def function[get_field_identifier, parameter[self]]:
constant[
Return the lag/lead function with the offset and default value
]
if compare[name[self].default is constant[None]] begin[:]
return[call[constant[{0}, {1}].format, parameter[call[name[self].field.get_select_sql, parameter[]], name[self].offset]]]
return[call[constant[{0}, {1}, '{2}'].format, parameter[call[name[self].field.get_select_sql, parameter[]], name[self].offset, name[self].default]]]
|
keyword[def] identifier[get_field_identifier] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[default] keyword[is] keyword[None] :
keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[field] . identifier[get_select_sql] (), identifier[self] . identifier[offset] )
keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[field] . identifier[get_select_sql] (), identifier[self] . identifier[offset] , identifier[self] . identifier[default] )
|
def get_field_identifier(self):
"""
Return the lag/lead function with the offset and default value
"""
if self.default is None:
return '{0}, {1}'.format(self.field.get_select_sql(), self.offset) # depends on [control=['if'], data=[]]
return "{0}, {1}, '{2}'".format(self.field.get_select_sql(), self.offset, self.default)
|
def stop(self):
"""
Stop the worker.
"""
if self._http_server is not None:
self._http_server.stop()
tornado.ioloop.IOLoop.instance().add_callback(
tornado.ioloop.IOLoop.instance().stop)
|
def function[stop, parameter[self]]:
constant[
Stop the worker.
]
if compare[name[self]._http_server is_not constant[None]] begin[:]
call[name[self]._http_server.stop, parameter[]]
call[call[name[tornado].ioloop.IOLoop.instance, parameter[]].add_callback, parameter[call[name[tornado].ioloop.IOLoop.instance, parameter[]].stop]]
|
keyword[def] identifier[stop] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_http_server] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_http_server] . identifier[stop] ()
identifier[tornado] . identifier[ioloop] . identifier[IOLoop] . identifier[instance] (). identifier[add_callback] (
identifier[tornado] . identifier[ioloop] . identifier[IOLoop] . identifier[instance] (). identifier[stop] )
|
def stop(self):
"""
Stop the worker.
"""
if self._http_server is not None:
self._http_server.stop() # depends on [control=['if'], data=[]]
tornado.ioloop.IOLoop.instance().add_callback(tornado.ioloop.IOLoop.instance().stop)
|
def _get_values(self, rdn):
"""
Returns a dict of prepped values contained in an RDN
:param rdn:
A RelativeDistinguishedName object
:return:
A dict object with unicode strings of NameTypeAndValue value field
values that have been prepped for comparison
"""
output = {}
[output.update([(ntv['type'].native, ntv.prepped_value)]) for ntv in rdn]
return output
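# Hedged note (not in the original source): the list comprehension above is used
# purely for its side effect on `output`; an equivalent, more conventional
# spelling is a plain dict comprehension over the same NameTypeAndValue objects.
def _get_values_alt(rdn):
    return {ntv['type'].native: ntv.prepped_value for ntv in rdn}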
|
def function[_get_values, parameter[self, rdn]]:
constant[
Returns a dict of prepped values contained in an RDN
:param rdn:
A RelativeDistinguishedName object
:return:
A dict object with unicode strings of NameTypeAndValue value field
values that have been prepped for comparison
]
variable[output] assign[=] dictionary[[], []]
<ast.ListComp object at 0x7da20cabfbe0>
return[name[output]]
|
keyword[def] identifier[_get_values] ( identifier[self] , identifier[rdn] ):
literal[string]
identifier[output] ={}
[ identifier[output] . identifier[update] ([( identifier[ntv] [ literal[string] ]. identifier[native] , identifier[ntv] . identifier[prepped_value] )]) keyword[for] identifier[ntv] keyword[in] identifier[rdn] ]
keyword[return] identifier[output]
|
def _get_values(self, rdn):
"""
Returns a dict of prepped values contained in an RDN
:param rdn:
A RelativeDistinguishedName object
:return:
A dict object with unicode strings of NameTypeAndValue value field
values that have been prepped for comparison
"""
output = {}
[output.update([(ntv['type'].native, ntv.prepped_value)]) for ntv in rdn]
return output
|
def uses_placeholder_y(ds):
"""If ``ds`` is a ``skorch.dataset.Dataset`` or a
``skorch.dataset.Dataset`` nested inside a
``torch.utils.data.Subset`` and uses
y as a placeholder, return ``True``."""
if isinstance(ds, torch.utils.data.Subset):
return uses_placeholder_y(ds.dataset)
return isinstance(ds, Dataset) and hasattr(ds, "y") and ds.y is None
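# Hedged usage sketch (assumes skorch and torch are importable, as in the module
# this comes from): a skorch Dataset built without targets keeps y=None, so the
# check holds even when the dataset is wrapped in a Subset.
import numpy as np
import torch
from skorch.dataset import Dataset

ds = Dataset(np.zeros((10, 3), dtype='float32'))                  # y defaults to None
print(uses_placeholder_y(ds))                                     # True
print(uses_placeholder_y(torch.utils.data.Subset(ds, [0, 1])))    # True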
|
def function[uses_placeholder_y, parameter[ds]]:
constant[If ``ds`` is a ``skorch.dataset.Dataset`` or a
``skorch.dataset.Dataset`` nested inside a
``torch.utils.data.Subset`` and uses
y as a placeholder, return ``True``.]
if call[name[isinstance], parameter[name[ds], name[torch].utils.data.Subset]] begin[:]
return[call[name[uses_placeholder_y], parameter[name[ds].dataset]]]
return[<ast.BoolOp object at 0x7da18dc04070>]
|
keyword[def] identifier[uses_placeholder_y] ( identifier[ds] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[ds] , identifier[torch] . identifier[utils] . identifier[data] . identifier[Subset] ):
keyword[return] identifier[uses_placeholder_y] ( identifier[ds] . identifier[dataset] )
keyword[return] identifier[isinstance] ( identifier[ds] , identifier[Dataset] ) keyword[and] identifier[hasattr] ( identifier[ds] , literal[string] ) keyword[and] identifier[ds] . identifier[y] keyword[is] keyword[None]
|
def uses_placeholder_y(ds):
"""If ``ds`` is a ``skorch.dataset.Dataset`` or a
``skorch.dataset.Dataset`` nested inside a
``torch.utils.data.Subset`` and uses
y as a placeholder, return ``True``."""
if isinstance(ds, torch.utils.data.Subset):
return uses_placeholder_y(ds.dataset) # depends on [control=['if'], data=[]]
return isinstance(ds, Dataset) and hasattr(ds, 'y') and (ds.y is None)
|
def decorator(caller, func=None):
"""
decorator(caller) converts a caller function into a decorator;
decorator(caller, func) decorates a function using a caller.
"""
if func is not None: # returns a decorated function
evaldict = func.func_globals.copy()
evaldict['_call_'] = caller
evaldict['_func_'] = func
return FunctionMaker.create(
func, "return _call_(_func_, %(shortsignature)s)",
evaldict, undecorated=func, __wrapped__=func)
else: # returns a decorator
if isinstance(caller, partial):
return partial(decorator, caller)
# otherwise assume caller is a function
first = inspect.getargspec(caller)[0][0] # first arg
evaldict = caller.func_globals.copy()
evaldict['_call_'] = caller
evaldict['decorator'] = decorator
return FunctionMaker.create(
'%s(%s)' % (caller.__name__, first),
'return decorator(_call_, %s)' % first,
evaldict, undecorated=caller, __wrapped__=caller,
doc=caller.__doc__, module=caller.__module__)
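# Hedged note (not part of the original): the function above targets Python 2;
# func.func_globals and inspect.getargspec do not exist on Python 3. A sketch of
# the same lookups with the Python 3 spellings, shown for comparison only:
import inspect

def _py3_lookups(caller, func):
    evaldict = func.__globals__.copy()             # Python 2: func.func_globals
    first = inspect.getfullargspec(caller)[0][0]   # Python 2: inspect.getargspec
    return evaldict, first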
|
def function[decorator, parameter[caller, func]]:
constant[
decorator(caller) converts a caller function into a decorator;
decorator(caller, func) decorates a function using a caller.
]
if compare[name[func] is_not constant[None]] begin[:]
variable[evaldict] assign[=] call[name[func].func_globals.copy, parameter[]]
call[name[evaldict]][constant[_call_]] assign[=] name[caller]
call[name[evaldict]][constant[_func_]] assign[=] name[func]
return[call[name[FunctionMaker].create, parameter[name[func], constant[return _call_(_func_, %(shortsignature)s)], name[evaldict]]]]
|
keyword[def] identifier[decorator] ( identifier[caller] , identifier[func] = keyword[None] ):
literal[string]
keyword[if] identifier[func] keyword[is] keyword[not] keyword[None] :
identifier[evaldict] = identifier[func] . identifier[func_globals] . identifier[copy] ()
identifier[evaldict] [ literal[string] ]= identifier[caller]
identifier[evaldict] [ literal[string] ]= identifier[func]
keyword[return] identifier[FunctionMaker] . identifier[create] (
identifier[func] , literal[string] ,
identifier[evaldict] , identifier[undecorated] = identifier[func] , identifier[__wrapped__] = identifier[func] )
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[caller] , identifier[partial] ):
keyword[return] identifier[partial] ( identifier[decorator] , identifier[caller] )
identifier[first] = identifier[inspect] . identifier[getargspec] ( identifier[caller] )[ literal[int] ][ literal[int] ]
identifier[evaldict] = identifier[caller] . identifier[func_globals] . identifier[copy] ()
identifier[evaldict] [ literal[string] ]= identifier[caller]
identifier[evaldict] [ literal[string] ]= identifier[decorator]
keyword[return] identifier[FunctionMaker] . identifier[create] (
literal[string] %( identifier[caller] . identifier[__name__] , identifier[first] ),
literal[string] % identifier[first] ,
identifier[evaldict] , identifier[undecorated] = identifier[caller] , identifier[__wrapped__] = identifier[caller] ,
identifier[doc] = identifier[caller] . identifier[__doc__] , identifier[module] = identifier[caller] . identifier[__module__] )
|
def decorator(caller, func=None):
"""
decorator(caller) converts a caller function into a decorator;
decorator(caller, func) decorates a function using a caller.
"""
if func is not None: # returns a decorated function
evaldict = func.func_globals.copy()
evaldict['_call_'] = caller
evaldict['_func_'] = func
return FunctionMaker.create(func, 'return _call_(_func_, %(shortsignature)s)', evaldict, undecorated=func, __wrapped__=func) # depends on [control=['if'], data=['func']]
else: # returns a decorator
if isinstance(caller, partial):
return partial(decorator, caller) # depends on [control=['if'], data=[]]
# otherwise assume caller is a function
first = inspect.getargspec(caller)[0][0] # first arg
evaldict = caller.func_globals.copy()
evaldict['_call_'] = caller
evaldict['decorator'] = decorator
return FunctionMaker.create('%s(%s)' % (caller.__name__, first), 'return decorator(_call_, %s)' % first, evaldict, undecorated=caller, __wrapped__=caller, doc=caller.__doc__, module=caller.__module__)
|
def subdomain_try_insert(self, cursor, subdomain_rec, history_neighbors):
"""
Try to insert a subdomain record into its history neighbors.
This is an optimization that handles the "usual" case.
We can do this without having to rewrite this subdomain's past and future
if (1) we can find a previously-accepted subdomain record, and (2) the transition
from this subdomain record to a future subdomain record preserves its
acceptance as True. In this case, the "far" past and "far" future are already
consistent.
Return True if we succeed in doing so.
Return False if not.
"""
blockchain_order = history_neighbors['prev'] + history_neighbors['cur'] + history_neighbors['fut']
last_accepted = -1
for i in range(0, len(blockchain_order)):
if blockchain_order[i].accepted:
last_accepted = i
break
if blockchain_order[i].n > subdomain_rec.n or (blockchain_order[i].n == subdomain_rec.n and blockchain_order[i].parent_zonefile_index > subdomain_rec.parent_zonefile_index):
# can't cheaply insert this subdomain record,
# since none of its immediate ancestors are accepted.
log.debug("No immediate ancestors are accepted on {}".format(subdomain_rec))
return False
if last_accepted < 0:
log.debug("No immediate ancestors or successors are accepted on {}".format(subdomain_rec))
return False
# one ancestor was accepted.
# work from there.
chain_tip_status = blockchain_order[-1].accepted
dirty = [] # to be written
for i in range(last_accepted+1, len(blockchain_order)):
cur_accepted = blockchain_order[i].accepted
new_accepted = self.check_subdomain_transition(blockchain_order[last_accepted], blockchain_order[i])
if new_accepted != cur_accepted:
blockchain_order[i].accepted = new_accepted
log.debug("Changed from {} to {}: {}".format(cur_accepted, new_accepted, blockchain_order[i]))
dirty.append(blockchain_order[i])
if new_accepted:
last_accepted = i
if chain_tip_status != blockchain_order[-1].accepted and len(history_neighbors['fut']) > 0:
# deeper reorg
log.debug("Immediate history chain tip altered from {} to {}: {}".format(chain_tip_status, blockchain_order[-1].accepted, blockchain_order[-1]))
return False
# localized change. Just commit the dirty entries
for subrec in dirty:
log.debug("Update to accepted={}: {}".format(subrec.accepted, subrec))
self.subdomain_db.update_subdomain_entry(subrec, cur=cursor)
return True
|
def function[subdomain_try_insert, parameter[self, cursor, subdomain_rec, history_neighbors]]:
constant[
Try to insert a subdomain record into its history neighbors.
This is an optimization that handles the "usual" case.
We can do this without having to rewrite this subdomain's past and future
if (1) we can find a previously-accepted subdomain record, and (2) the transition
from this subdomain record to a future subdomain record preserves its
acceptance as True. In this case, the "far" past and "far" future are already
consistent.
Return True if we succeed in doing so.
Return False if not.
]
variable[blockchain_order] assign[=] binary_operation[binary_operation[call[name[history_neighbors]][constant[prev]] + call[name[history_neighbors]][constant[cur]]] + call[name[history_neighbors]][constant[fut]]]
variable[last_accepted] assign[=] <ast.UnaryOp object at 0x7da1b16a8fd0>
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[blockchain_order]]]]]] begin[:]
if call[name[blockchain_order]][name[i]].accepted begin[:]
variable[last_accepted] assign[=] name[i]
break
if <ast.BoolOp object at 0x7da1b16ab4f0> begin[:]
call[name[log].debug, parameter[call[constant[No immediate ancestors are accepted on {}].format, parameter[name[subdomain_rec]]]]]
return[constant[False]]
if compare[name[last_accepted] less[<] constant[0]] begin[:]
call[name[log].debug, parameter[call[constant[No immediate ancestors or successors are accepted on {}].format, parameter[name[subdomain_rec]]]]]
return[constant[False]]
variable[chain_tip_status] assign[=] call[name[blockchain_order]][<ast.UnaryOp object at 0x7da1b16aa2c0>].accepted
variable[dirty] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[name[last_accepted] + constant[1]], call[name[len], parameter[name[blockchain_order]]]]]] begin[:]
variable[cur_accepted] assign[=] call[name[blockchain_order]][name[i]].accepted
variable[new_accepted] assign[=] call[name[self].check_subdomain_transition, parameter[call[name[blockchain_order]][name[last_accepted]], call[name[blockchain_order]][name[i]]]]
if compare[name[new_accepted] not_equal[!=] name[cur_accepted]] begin[:]
call[name[blockchain_order]][name[i]].accepted assign[=] name[new_accepted]
call[name[log].debug, parameter[call[constant[Changed from {} to {}: {}].format, parameter[name[cur_accepted], name[new_accepted], call[name[blockchain_order]][name[i]]]]]]
call[name[dirty].append, parameter[call[name[blockchain_order]][name[i]]]]
if name[new_accepted] begin[:]
variable[last_accepted] assign[=] name[i]
if <ast.BoolOp object at 0x7da1b180d2d0> begin[:]
call[name[log].debug, parameter[call[constant[Immediate history chain tip altered from {} to {}: {}].format, parameter[name[chain_tip_status], call[name[blockchain_order]][<ast.UnaryOp object at 0x7da1b180eaa0>].accepted, call[name[blockchain_order]][<ast.UnaryOp object at 0x7da204344640>]]]]]
return[constant[False]]
for taget[name[subrec]] in starred[name[dirty]] begin[:]
call[name[log].debug, parameter[call[constant[Update to accepted={}: {}].format, parameter[name[subrec].accepted, name[subrec]]]]]
call[name[self].subdomain_db.update_subdomain_entry, parameter[name[subrec]]]
return[constant[True]]
|
keyword[def] identifier[subdomain_try_insert] ( identifier[self] , identifier[cursor] , identifier[subdomain_rec] , identifier[history_neighbors] ):
literal[string]
identifier[blockchain_order] = identifier[history_neighbors] [ literal[string] ]+ identifier[history_neighbors] [ literal[string] ]+ identifier[history_neighbors] [ literal[string] ]
identifier[last_accepted] =- literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[blockchain_order] )):
keyword[if] identifier[blockchain_order] [ identifier[i] ]. identifier[accepted] :
identifier[last_accepted] = identifier[i]
keyword[break]
keyword[if] identifier[blockchain_order] [ identifier[i] ]. identifier[n] > identifier[subdomain_rec] . identifier[n] keyword[or] ( identifier[blockchain_order] [ identifier[i] ]. identifier[n] == identifier[subdomain_rec] . identifier[n] keyword[and] identifier[blockchain_order] [ identifier[i] ]. identifier[parent_zonefile_index] > identifier[subdomain_rec] . identifier[parent_zonefile_index] ):
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[subdomain_rec] ))
keyword[return] keyword[False]
keyword[if] identifier[last_accepted] < literal[int] :
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[subdomain_rec] ))
keyword[return] keyword[False]
identifier[chain_tip_status] = identifier[blockchain_order] [- literal[int] ]. identifier[accepted]
identifier[dirty] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[last_accepted] + literal[int] , identifier[len] ( identifier[blockchain_order] )):
identifier[cur_accepted] = identifier[blockchain_order] [ identifier[i] ]. identifier[accepted]
identifier[new_accepted] = identifier[self] . identifier[check_subdomain_transition] ( identifier[blockchain_order] [ identifier[last_accepted] ], identifier[blockchain_order] [ identifier[i] ])
keyword[if] identifier[new_accepted] != identifier[cur_accepted] :
identifier[blockchain_order] [ identifier[i] ]. identifier[accepted] = identifier[new_accepted]
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[cur_accepted] , identifier[new_accepted] , identifier[blockchain_order] [ identifier[i] ]))
identifier[dirty] . identifier[append] ( identifier[blockchain_order] [ identifier[i] ])
keyword[if] identifier[new_accepted] :
identifier[last_accepted] = identifier[i]
keyword[if] identifier[chain_tip_status] != identifier[blockchain_order] [- literal[int] ]. identifier[accepted] keyword[and] identifier[len] ( identifier[history_neighbors] [ literal[string] ])> literal[int] :
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[chain_tip_status] , identifier[blockchain_order] [- literal[int] ]. identifier[accepted] , identifier[blockchain_order] [- literal[int] ]))
keyword[return] keyword[False]
keyword[for] identifier[subrec] keyword[in] identifier[dirty] :
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[subrec] . identifier[accepted] , identifier[subrec] ))
identifier[self] . identifier[subdomain_db] . identifier[update_subdomain_entry] ( identifier[subrec] , identifier[cur] = identifier[cursor] )
keyword[return] keyword[True]
|
def subdomain_try_insert(self, cursor, subdomain_rec, history_neighbors):
"""
Try to insert a subdomain record into its history neighbors.
This is an optimization that handles the "usual" case.
We can do this without having to rewrite this subdomain's past and future
if (1) we can find a previously-accepted subdomain record, and (2) the transition
from this subdomain record to a future subdomain record preserves its
acceptance as True. In this case, the "far" past and "far" future are already
consistent.
Return True if we succeed in doing so.
Return False if not.
"""
blockchain_order = history_neighbors['prev'] + history_neighbors['cur'] + history_neighbors['fut']
last_accepted = -1
for i in range(0, len(blockchain_order)):
if blockchain_order[i].accepted:
last_accepted = i
break # depends on [control=['if'], data=[]]
if blockchain_order[i].n > subdomain_rec.n or (blockchain_order[i].n == subdomain_rec.n and blockchain_order[i].parent_zonefile_index > subdomain_rec.parent_zonefile_index):
# can't cheaply insert this subdomain record,
# since none of its immediate ancestors are accepted.
log.debug('No immediate ancestors are accepted on {}'.format(subdomain_rec))
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
if last_accepted < 0:
log.debug('No immediate ancestors or successors are accepted on {}'.format(subdomain_rec))
return False # depends on [control=['if'], data=[]]
# one ancestor was accepted.
# work from there.
chain_tip_status = blockchain_order[-1].accepted
dirty = [] # to be written
for i in range(last_accepted + 1, len(blockchain_order)):
cur_accepted = blockchain_order[i].accepted
new_accepted = self.check_subdomain_transition(blockchain_order[last_accepted], blockchain_order[i])
if new_accepted != cur_accepted:
blockchain_order[i].accepted = new_accepted
log.debug('Changed from {} to {}: {}'.format(cur_accepted, new_accepted, blockchain_order[i]))
dirty.append(blockchain_order[i]) # depends on [control=['if'], data=['new_accepted', 'cur_accepted']]
if new_accepted:
last_accepted = i # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
if chain_tip_status != blockchain_order[-1].accepted and len(history_neighbors['fut']) > 0:
# deeper reorg
log.debug('Immediate history chain tip altered from {} to {}: {}'.format(chain_tip_status, blockchain_order[-1].accepted, blockchain_order[-1]))
return False # depends on [control=['if'], data=[]]
# localized change. Just commit the dirty entries
for subrec in dirty:
log.debug('Update to accepted={}: {}'.format(subrec.accepted, subrec))
self.subdomain_db.update_subdomain_entry(subrec, cur=cursor) # depends on [control=['for'], data=['subrec']]
return True
|
def decamelise(text):
"""Convert CamelCase to lower_and_underscore."""
s = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', text)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s).lower()
|
def function[decamelise, parameter[text]]:
constant[Convert CamelCase to lower_and_underscore.]
variable[s] assign[=] call[name[re].sub, parameter[constant[(.)([A-Z][a-z]+)], constant[\1_\2], name[text]]]
return[call[call[name[re].sub, parameter[constant[([a-z0-9])([A-Z])], constant[\1_\2], name[s]]].lower, parameter[]]]
|
keyword[def] identifier[decamelise] ( identifier[text] ):
literal[string]
identifier[s] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[text] )
keyword[return] identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[s] ). identifier[lower] ()
|
def decamelise(text):
"""Convert CamelCase to lower_and_underscore."""
s = re.sub('(.)([A-Z][a-z]+)', '\\1_\\2', text)
return re.sub('([a-z0-9])([A-Z])', '\\1_\\2', s).lower()
|
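A self-contained check of the CamelCase conversion above (same two substitutions, exercised on a couple of inputs):

import re

def decamelise(text):
    s = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', text)
    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s).lower()

assert decamelise('CamelCase') == 'camel_case'
assert decamelise('HTTPServerError') == 'http_server_error'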
def log_y_cb(self, w, val):
"""Toggle linear/log scale for Y-axis."""
self.tab_plot.logy = val
self.plot_two_columns()
|
def function[log_y_cb, parameter[self, w, val]]:
constant[Toggle linear/log scale for Y-axis.]
name[self].tab_plot.logy assign[=] name[val]
call[name[self].plot_two_columns, parameter[]]
|
keyword[def] identifier[log_y_cb] ( identifier[self] , identifier[w] , identifier[val] ):
literal[string]
identifier[self] . identifier[tab_plot] . identifier[logy] = identifier[val]
identifier[self] . identifier[plot_two_columns] ()
|
def log_y_cb(self, w, val):
"""Toggle linear/log scale for Y-axis."""
self.tab_plot.logy = val
self.plot_two_columns()
|
def remove_triple(self, p, o, auto_refresh=True):
'''
remove triple by supplying p,o
Args:
p (rdflib.term.URIRef): predicate
o (): object
auto_refresh (bool): whether or not to update object-like self.rdf.triples
Returns:
None: removes triple from self.rdf.graph
'''
self.rdf.graph.remove((self.uri, p, self._handle_object(o)))
# determine if triples refreshed
self._handle_triple_refresh(auto_refresh)
|
def function[remove_triple, parameter[self, p, o, auto_refresh]]:
constant[
remove triple by supplying p,o
Args:
p (rdflib.term.URIRef): predicate
o (): object
auto_refresh (bool): whether or not to update object-like self.rdf.triples
Returns:
None: removes triple from self.rdf.graph
]
call[name[self].rdf.graph.remove, parameter[tuple[[<ast.Attribute object at 0x7da1b2429720>, <ast.Name object at 0x7da1b2428670>, <ast.Call object at 0x7da1b242bd30>]]]]
call[name[self]._handle_triple_refresh, parameter[name[auto_refresh]]]
|
keyword[def] identifier[remove_triple] ( identifier[self] , identifier[p] , identifier[o] , identifier[auto_refresh] = keyword[True] ):
literal[string]
identifier[self] . identifier[rdf] . identifier[graph] . identifier[remove] (( identifier[self] . identifier[uri] , identifier[p] , identifier[self] . identifier[_handle_object] ( identifier[o] )))
identifier[self] . identifier[_handle_triple_refresh] ( identifier[auto_refresh] )
|
def remove_triple(self, p, o, auto_refresh=True):
"""
remove triple by supplying p,o
Args:
p (rdflib.term.URIRef): predicate
o (): object
auto_refresh (bool): whether or not to update object-like self.rdf.triples
Returns:
None: removes triple from self.rdf.graph
"""
self.rdf.graph.remove((self.uri, p, self._handle_object(o))) # determine if triples refreshed
self._handle_triple_refresh(auto_refresh)
|
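A standalone rdflib illustration of the underlying graph.remove((s, p, o)) call that remove_triple wraps; the example URI and predicate are hypothetical:

from rdflib import Graph, URIRef, Literal
from rdflib.namespace import FOAF

g = Graph()
s = URIRef('http://example.org/item')
g.add((s, FOAF.name, Literal('example')))
g.remove((s, FOAF.name, Literal('example')))  # same (s, p, o) tuple removes the triple
assert len(g) == 0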
def iterate_chunks(i, size=10):
"""
Iterate over an iterator ``i`` in ``size`` chunks, yield chunks.
Similar to pagination.
Example::
>>> list(iterate_chunks([1, 2, 3, 4], size=2))
[[1, 2], [3, 4]]
"""
accumulator = []
for n, i in enumerate(i):
accumulator.append(i)
if (n+1) % size == 0:
yield accumulator
accumulator = []
if accumulator:
yield accumulator
|
def function[iterate_chunks, parameter[i, size]]:
constant[
Iterate over an iterator ``i`` in ``size`` chunks, yield chunks.
Similar to pagination.
Example::
>>> list(iterate_chunks([1, 2, 3, 4], size=2))
[[1, 2], [3, 4]]
]
variable[accumulator] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b0f42830>, <ast.Name object at 0x7da1b0f43a00>]]] in starred[call[name[enumerate], parameter[name[i]]]] begin[:]
call[name[accumulator].append, parameter[name[i]]]
if compare[binary_operation[binary_operation[name[n] + constant[1]] <ast.Mod object at 0x7da2590d6920> name[size]] equal[==] constant[0]] begin[:]
<ast.Yield object at 0x7da1b0f435e0>
variable[accumulator] assign[=] list[[]]
if name[accumulator] begin[:]
<ast.Yield object at 0x7da1b0f40550>
|
keyword[def] identifier[iterate_chunks] ( identifier[i] , identifier[size] = literal[int] ):
literal[string]
identifier[accumulator] =[]
keyword[for] identifier[n] , identifier[i] keyword[in] identifier[enumerate] ( identifier[i] ):
identifier[accumulator] . identifier[append] ( identifier[i] )
keyword[if] ( identifier[n] + literal[int] )% identifier[size] == literal[int] :
keyword[yield] identifier[accumulator]
identifier[accumulator] =[]
keyword[if] identifier[accumulator] :
keyword[yield] identifier[accumulator]
|
def iterate_chunks(i, size=10):
"""
Iterate over an iterator ``i`` in ``size`` chunks, yield chunks.
Similar to pagination.
Example::
>>> list(iterate_chunks([1, 2, 3, 4], size=2))
[[1, 2], [3, 4]]
"""
accumulator = []
for (n, i) in enumerate(i):
accumulator.append(i)
if (n + 1) % size == 0:
yield accumulator
accumulator = [] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if accumulator:
yield accumulator # depends on [control=['if'], data=[]]
|
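A quick usage check of iterate_chunks as defined above (not self-contained, it uses that definition), showing that a trailing partial chunk is still yielded:

assert list(iterate_chunks(range(5), size=2)) == [[0, 1], [2, 3], [4]]
assert list(iterate_chunks([], size=3)) == []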
def set_quickchart_resource(self, resource):
# type: (Union[hdx.data.resource.Resource,Dict,str,int]) -> bool
"""Set the resource that will be used for displaying QuickCharts in dataset preview
Args:
resource (Union[hdx.data.resource.Resource,Dict,str,int]): Either resource id or name, resource metadata from a Resource object or a dictionary or position
Returns:
bool: Returns True if resource for QuickCharts in dataset preview set or False if not
"""
if isinstance(resource, int) and not isinstance(resource, bool):
resource = self.get_resources()[resource]
if isinstance(resource, hdx.data.resource.Resource) or isinstance(resource, dict):
res = resource.get('id')
if res is None:
resource = resource['name']
else:
resource = res
elif not isinstance(resource, str):
raise hdx.data.hdxobject.HDXError('Resource id cannot be found in type %s!' % type(resource).__name__)
if is_valid_uuid(resource) is True:
search = 'id'
else:
search = 'name'
changed = False
for dataset_resource in self.resources:
if dataset_resource[search] == resource:
dataset_resource.enable_dataset_preview()
self.preview_resource()
changed = True
else:
dataset_resource.disable_dataset_preview()
return changed
|
def function[set_quickchart_resource, parameter[self, resource]]:
constant[Set the resource that will be used for displaying QuickCharts in dataset preview
Args:
resource (Union[hdx.data.resource.Resource,Dict,str,int]): Either resource id or name, resource metadata from a Resource object or a dictionary or position
Returns:
bool: Returns True if resource for QuickCharts in dataset preview set or False if not
]
if <ast.BoolOp object at 0x7da20c76da20> begin[:]
variable[resource] assign[=] call[call[name[self].get_resources, parameter[]]][name[resource]]
if <ast.BoolOp object at 0x7da20c6c6230> begin[:]
variable[res] assign[=] call[name[resource].get, parameter[constant[id]]]
if compare[name[res] is constant[None]] begin[:]
variable[resource] assign[=] call[name[resource]][constant[name]]
if compare[call[name[is_valid_uuid], parameter[name[resource]]] is constant[True]] begin[:]
variable[search] assign[=] constant[id]
variable[changed] assign[=] constant[False]
for taget[name[dataset_resource]] in starred[name[self].resources] begin[:]
if compare[call[name[dataset_resource]][name[search]] equal[==] name[resource]] begin[:]
call[name[dataset_resource].enable_dataset_preview, parameter[]]
call[name[self].preview_resource, parameter[]]
variable[changed] assign[=] constant[True]
return[name[changed]]
|
keyword[def] identifier[set_quickchart_resource] ( identifier[self] , identifier[resource] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[resource] , identifier[int] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[resource] , identifier[bool] ):
identifier[resource] = identifier[self] . identifier[get_resources] ()[ identifier[resource] ]
keyword[if] identifier[isinstance] ( identifier[resource] , identifier[hdx] . identifier[data] . identifier[resource] . identifier[Resource] ) keyword[or] identifier[isinstance] ( identifier[resource] , identifier[dict] ):
identifier[res] = identifier[resource] . identifier[get] ( literal[string] )
keyword[if] identifier[res] keyword[is] keyword[None] :
identifier[resource] = identifier[resource] [ literal[string] ]
keyword[else] :
identifier[resource] = identifier[res]
keyword[elif] keyword[not] identifier[isinstance] ( identifier[resource] , identifier[str] ):
keyword[raise] identifier[hdx] . identifier[data] . identifier[hdxobject] . identifier[HDXError] ( literal[string] % identifier[type] ( identifier[resource] ). identifier[__name__] )
keyword[if] identifier[is_valid_uuid] ( identifier[resource] ) keyword[is] keyword[True] :
identifier[search] = literal[string]
keyword[else] :
identifier[search] = literal[string]
identifier[changed] = keyword[False]
keyword[for] identifier[dataset_resource] keyword[in] identifier[self] . identifier[resources] :
keyword[if] identifier[dataset_resource] [ identifier[search] ]== identifier[resource] :
identifier[dataset_resource] . identifier[enable_dataset_preview] ()
identifier[self] . identifier[preview_resource] ()
identifier[changed] = keyword[True]
keyword[else] :
identifier[dataset_resource] . identifier[disable_dataset_preview] ()
keyword[return] identifier[changed]
|
def set_quickchart_resource(self, resource):
# type: (Union[hdx.data.resource.Resource,Dict,str,int]) -> bool
'Set the resource that will be used for displaying QuickCharts in dataset preview\n\n Args:\n resource (Union[hdx.data.resource.Resource,Dict,str,int]): Either resource id or name, resource metadata from a Resource object or a dictionary or position\n\n Returns:\n bool: Returns True if resource for QuickCharts in dataset preview set or False if not\n '
if isinstance(resource, int) and (not isinstance(resource, bool)):
resource = self.get_resources()[resource] # depends on [control=['if'], data=[]]
if isinstance(resource, hdx.data.resource.Resource) or isinstance(resource, dict):
res = resource.get('id')
if res is None:
resource = resource['name'] # depends on [control=['if'], data=[]]
else:
resource = res # depends on [control=['if'], data=[]]
elif not isinstance(resource, str):
raise hdx.data.hdxobject.HDXError('Resource id cannot be found in type %s!' % type(resource).__name__) # depends on [control=['if'], data=[]]
if is_valid_uuid(resource) is True:
search = 'id' # depends on [control=['if'], data=[]]
else:
search = 'name'
changed = False
for dataset_resource in self.resources:
if dataset_resource[search] == resource:
dataset_resource.enable_dataset_preview()
self.preview_resource()
changed = True # depends on [control=['if'], data=[]]
else:
dataset_resource.disable_dataset_preview() # depends on [control=['for'], data=['dataset_resource']]
return changed
|
def backend():
"""
:return:
A unicode string of the backend being used: "openssl", "osx", "win",
"winlegacy"
"""
if _module_values['backend'] is not None:
return _module_values['backend']
with _backend_lock:
if _module_values['backend'] is not None:
return _module_values['backend']
if sys.platform == 'win32':
# Windows XP was major version 5, Vista was 6
if sys.getwindowsversion()[0] < 6:
_module_values['backend'] = 'winlegacy'
else:
_module_values['backend'] = 'win'
elif sys.platform == 'darwin':
_module_values['backend'] = 'osx'
else:
_module_values['backend'] = 'openssl'
return _module_values['backend']
|
def function[backend, parameter[]]:
constant[
:return:
A unicode string of the backend being used: "openssl", "osx", "win",
"winlegacy"
]
if compare[call[name[_module_values]][constant[backend]] is_not constant[None]] begin[:]
return[call[name[_module_values]][constant[backend]]]
with name[_backend_lock] begin[:]
if compare[call[name[_module_values]][constant[backend]] is_not constant[None]] begin[:]
return[call[name[_module_values]][constant[backend]]]
if compare[name[sys].platform equal[==] constant[win32]] begin[:]
if compare[call[call[name[sys].getwindowsversion, parameter[]]][constant[0]] less[<] constant[6]] begin[:]
call[name[_module_values]][constant[backend]] assign[=] constant[winlegacy]
return[call[name[_module_values]][constant[backend]]]
|
keyword[def] identifier[backend] ():
literal[string]
keyword[if] identifier[_module_values] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[_module_values] [ literal[string] ]
keyword[with] identifier[_backend_lock] :
keyword[if] identifier[_module_values] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[_module_values] [ literal[string] ]
keyword[if] identifier[sys] . identifier[platform] == literal[string] :
keyword[if] identifier[sys] . identifier[getwindowsversion] ()[ literal[int] ]< literal[int] :
identifier[_module_values] [ literal[string] ]= literal[string]
keyword[else] :
identifier[_module_values] [ literal[string] ]= literal[string]
keyword[elif] identifier[sys] . identifier[platform] == literal[string] :
identifier[_module_values] [ literal[string] ]= literal[string]
keyword[else] :
identifier[_module_values] [ literal[string] ]= literal[string]
keyword[return] identifier[_module_values] [ literal[string] ]
|
def backend():
"""
:return:
A unicode string of the backend being used: "openssl", "osx", "win",
"winlegacy"
"""
if _module_values['backend'] is not None:
return _module_values['backend'] # depends on [control=['if'], data=[]]
with _backend_lock:
if _module_values['backend'] is not None:
return _module_values['backend'] # depends on [control=['if'], data=[]]
if sys.platform == 'win32':
# Windows XP was major version 5, Vista was 6
if sys.getwindowsversion()[0] < 6:
_module_values['backend'] = 'winlegacy' # depends on [control=['if'], data=[]]
else:
_module_values['backend'] = 'win' # depends on [control=['if'], data=[]]
elif sys.platform == 'darwin':
_module_values['backend'] = 'osx' # depends on [control=['if'], data=[]]
else:
_module_values['backend'] = 'openssl'
return _module_values['backend'] # depends on [control=['with'], data=[]]
|
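A plausible sketch of the module-level state backend() relies on; only the names _module_values and _backend_lock appear above, so the exact definitions here are an assumption:

import threading

_module_values = {'backend': None}   # cached backend name, filled on first call
_backend_lock = threading.Lock()     # guards the first-time platform detection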
def write_data(self, buf):
"""Send data to the device.
If the write fails for any reason, an :obj:`IOError` exception
is raised.
:param buf: the data to send.
:type buf: list(int)
:return: success status.
:rtype: bool
"""
bmRequestType = usb.util.build_request_type(
usb.util.ENDPOINT_OUT,
usb.util.CTRL_TYPE_CLASS,
usb.util.CTRL_RECIPIENT_INTERFACE
)
result = self.dev.ctrl_transfer(
bmRequestType=bmRequestType,
bRequest=usb.REQ_SET_CONFIGURATION,
data_or_wLength=buf,
wValue=0x200,
timeout=50)
if result != len(buf):
raise IOError('pywws.device_pyusb1.USBDevice.write_data failed')
return True
|
def function[write_data, parameter[self, buf]]:
constant[Send data to the device.
If the write fails for any reason, an :obj:`IOError` exception
is raised.
:param buf: the data to send.
:type buf: list(int)
:return: success status.
:rtype: bool
]
variable[bmRequestType] assign[=] call[name[usb].util.build_request_type, parameter[name[usb].util.ENDPOINT_OUT, name[usb].util.CTRL_TYPE_CLASS, name[usb].util.CTRL_RECIPIENT_INTERFACE]]
variable[result] assign[=] call[name[self].dev.ctrl_transfer, parameter[]]
if compare[name[result] not_equal[!=] call[name[len], parameter[name[buf]]]] begin[:]
<ast.Raise object at 0x7da20c7c9d50>
return[constant[True]]
|
keyword[def] identifier[write_data] ( identifier[self] , identifier[buf] ):
literal[string]
identifier[bmRequestType] = identifier[usb] . identifier[util] . identifier[build_request_type] (
identifier[usb] . identifier[util] . identifier[ENDPOINT_OUT] ,
identifier[usb] . identifier[util] . identifier[CTRL_TYPE_CLASS] ,
identifier[usb] . identifier[util] . identifier[CTRL_RECIPIENT_INTERFACE]
)
identifier[result] = identifier[self] . identifier[dev] . identifier[ctrl_transfer] (
identifier[bmRequestType] = identifier[bmRequestType] ,
identifier[bRequest] = identifier[usb] . identifier[REQ_SET_CONFIGURATION] ,
identifier[data_or_wLength] = identifier[buf] ,
identifier[wValue] = literal[int] ,
identifier[timeout] = literal[int] )
keyword[if] identifier[result] != identifier[len] ( identifier[buf] ):
keyword[raise] identifier[IOError] ( literal[string] )
keyword[return] keyword[True]
|
def write_data(self, buf):
"""Send data to the device.
If the write fails for any reason, an :obj:`IOError` exception
is raised.
:param buf: the data to send.
:type buf: list(int)
:return: success status.
:rtype: bool
"""
bmRequestType = usb.util.build_request_type(usb.util.ENDPOINT_OUT, usb.util.CTRL_TYPE_CLASS, usb.util.CTRL_RECIPIENT_INTERFACE)
result = self.dev.ctrl_transfer(bmRequestType=bmRequestType, bRequest=usb.REQ_SET_CONFIGURATION, data_or_wLength=buf, wValue=512, timeout=50)
if result != len(buf):
raise IOError('pywws.device_pyusb1.USBDevice.write_data failed') # depends on [control=['if'], data=[]]
return True
|
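For reference, the request type built above is the standard host-to-device / class / interface-recipient value; a quick pyusb check (assuming pyusb is installed):

import usb.util

bm_request_type = usb.util.build_request_type(
    usb.util.ENDPOINT_OUT,
    usb.util.CTRL_TYPE_CLASS,
    usb.util.CTRL_RECIPIENT_INTERFACE)
assert bm_request_type == 0x21  # direction | type | recipient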
def fill_from_simbad (self, ident, debug=False):
"""Fill in astrometric information using the Simbad web service.
This uses the CDS Simbad web service to look up astrometric
information for the source name *ident* and fills in attributes
appropriately. Values from Simbad are not always reliable.
Returns *self*.
"""
info = get_simbad_astrometry_info (ident, debug=debug)
posref = 'unknown'
for k, v in six.iteritems (info):
if '~' in v:
continue # no info
if k == 'COO(d;A)':
self.ra = float (v) * D2R
elif k == 'COO(d;D)':
self.dec = float (v) * D2R
elif k == 'COO(E)':
a = v.split ()
self.pos_u_maj = float (a[0]) * A2R * 1e-3 # mas -> rad
self.pos_u_min = float (a[1]) * A2R * 1e-3
self.pos_u_pa = float (a[2]) * D2R
elif k == 'COO(B)':
posref = v
elif k == 'PM(A)':
self.promo_ra = float (v) # mas/yr
elif k == 'PM(D)':
self.promo_dec = float (v) # mas/yr
elif k == 'PM(E)':
a = v.split ()
self.promo_u_maj = float (a[0]) # mas/yr
self.promo_u_min = float (a[1])
self.promo_u_pa = float (a[2]) * D2R # rad!
elif k == 'PLX(V)':
self.parallax = float (v) # mas
elif k == 'PLX(E)':
self.u_parallax = float (v) # mas
elif k == 'RV(V)':
self.vradial = float (v) # km/s
elif k == 'RV(E)':
self.u_vradial = float (v) #km/s
if self.ra is None:
raise Exception ('no position returned by Simbad for "%s"' % ident)
if self.u_parallax == 0:
self.u_parallax = None
if self.u_vradial == 0:
self.u_vradial = None
# Get the right epoch of position for 2MASS positions
if posref == '2003yCat.2246....0C':
self.pos_epoch = get_2mass_epoch (self.ra, self.dec, debug)
return self
|
def function[fill_from_simbad, parameter[self, ident, debug]]:
constant[Fill in astrometric information using the Simbad web service.
This uses the CDS Simbad web service to look up astrometric
information for the source name *ident* and fills in attributes
appropriately. Values from Simbad are not always reliable.
Returns *self*.
]
variable[info] assign[=] call[name[get_simbad_astrometry_info], parameter[name[ident]]]
variable[posref] assign[=] constant[unknown]
for taget[tuple[[<ast.Name object at 0x7da1b2639de0>, <ast.Name object at 0x7da1b2639480>]]] in starred[call[name[six].iteritems, parameter[name[info]]]] begin[:]
if compare[constant[~] in name[v]] begin[:]
continue
if compare[name[k] equal[==] constant[COO(d;A)]] begin[:]
name[self].ra assign[=] binary_operation[call[name[float], parameter[name[v]]] * name[D2R]]
if compare[name[self].ra is constant[None]] begin[:]
<ast.Raise object at 0x7da1b26876a0>
if compare[name[self].u_parallax equal[==] constant[0]] begin[:]
name[self].u_parallax assign[=] constant[None]
if compare[name[self].u_vradial equal[==] constant[0]] begin[:]
name[self].u_vradial assign[=] constant[None]
if compare[name[posref] equal[==] constant[2003yCat.2246....0C]] begin[:]
name[self].pos_epoch assign[=] call[name[get_2mass_epoch], parameter[name[self].ra, name[self].dec, name[debug]]]
return[name[self]]
|
keyword[def] identifier[fill_from_simbad] ( identifier[self] , identifier[ident] , identifier[debug] = keyword[False] ):
literal[string]
identifier[info] = identifier[get_simbad_astrometry_info] ( identifier[ident] , identifier[debug] = identifier[debug] )
identifier[posref] = literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[info] ):
keyword[if] literal[string] keyword[in] identifier[v] :
keyword[continue]
keyword[if] identifier[k] == literal[string] :
identifier[self] . identifier[ra] = identifier[float] ( identifier[v] )* identifier[D2R]
keyword[elif] identifier[k] == literal[string] :
identifier[self] . identifier[dec] = identifier[float] ( identifier[v] )* identifier[D2R]
keyword[elif] identifier[k] == literal[string] :
identifier[a] = identifier[v] . identifier[split] ()
identifier[self] . identifier[pos_u_maj] = identifier[float] ( identifier[a] [ literal[int] ])* identifier[A2R] * literal[int]
identifier[self] . identifier[pos_u_min] = identifier[float] ( identifier[a] [ literal[int] ])* identifier[A2R] * literal[int]
identifier[self] . identifier[pos_u_pa] = identifier[float] ( identifier[a] [ literal[int] ])* identifier[D2R]
keyword[elif] identifier[k] == literal[string] :
identifier[posref] = identifier[v]
keyword[elif] identifier[k] == literal[string] :
identifier[self] . identifier[promo_ra] = identifier[float] ( identifier[v] )
keyword[elif] identifier[k] == literal[string] :
identifier[self] . identifier[promo_dec] = identifier[float] ( identifier[v] )
keyword[elif] identifier[k] == literal[string] :
identifier[a] = identifier[v] . identifier[split] ()
identifier[self] . identifier[promo_u_maj] = identifier[float] ( identifier[a] [ literal[int] ])
identifier[self] . identifier[promo_u_min] = identifier[float] ( identifier[a] [ literal[int] ])
identifier[self] . identifier[promo_u_pa] = identifier[float] ( identifier[a] [ literal[int] ])* identifier[D2R]
keyword[elif] identifier[k] == literal[string] :
identifier[self] . identifier[parallax] = identifier[float] ( identifier[v] )
keyword[elif] identifier[k] == literal[string] :
identifier[self] . identifier[u_parallax] = identifier[float] ( identifier[v] )
keyword[elif] identifier[k] == literal[string] :
identifier[self] . identifier[vradial] = identifier[float] ( identifier[v] )
keyword[elif] identifier[k] == literal[string] :
identifier[self] . identifier[u_vradial] = identifier[float] ( identifier[v] )
keyword[if] identifier[self] . identifier[ra] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[ident] )
keyword[if] identifier[self] . identifier[u_parallax] == literal[int] :
identifier[self] . identifier[u_parallax] = keyword[None]
keyword[if] identifier[self] . identifier[u_vradial] == literal[int] :
identifier[self] . identifier[u_vradial] = keyword[None]
keyword[if] identifier[posref] == literal[string] :
identifier[self] . identifier[pos_epoch] = identifier[get_2mass_epoch] ( identifier[self] . identifier[ra] , identifier[self] . identifier[dec] , identifier[debug] )
keyword[return] identifier[self]
|
def fill_from_simbad(self, ident, debug=False):
"""Fill in astrometric information using the Simbad web service.
This uses the CDS Simbad web service to look up astrometric
information for the source name *ident* and fills in attributes
appropriately. Values from Simbad are not always reliable.
Returns *self*.
"""
info = get_simbad_astrometry_info(ident, debug=debug)
posref = 'unknown'
for (k, v) in six.iteritems(info):
if '~' in v:
continue # no info # depends on [control=['if'], data=[]]
if k == 'COO(d;A)':
self.ra = float(v) * D2R # depends on [control=['if'], data=[]]
elif k == 'COO(d;D)':
self.dec = float(v) * D2R # depends on [control=['if'], data=[]]
elif k == 'COO(E)':
a = v.split()
self.pos_u_maj = float(a[0]) * A2R * 0.001 # mas -> rad
self.pos_u_min = float(a[1]) * A2R * 0.001
self.pos_u_pa = float(a[2]) * D2R # depends on [control=['if'], data=[]]
elif k == 'COO(B)':
posref = v # depends on [control=['if'], data=[]]
elif k == 'PM(A)':
self.promo_ra = float(v) # mas/yr # depends on [control=['if'], data=[]]
elif k == 'PM(D)':
self.promo_dec = float(v) # mas/yr # depends on [control=['if'], data=[]]
elif k == 'PM(E)':
a = v.split()
self.promo_u_maj = float(a[0]) # mas/yr
self.promo_u_min = float(a[1])
self.promo_u_pa = float(a[2]) * D2R # rad! # depends on [control=['if'], data=[]]
elif k == 'PLX(V)':
self.parallax = float(v) # mas # depends on [control=['if'], data=[]]
elif k == 'PLX(E)':
self.u_parallax = float(v) # mas # depends on [control=['if'], data=[]]
elif k == 'RV(V)':
self.vradial = float(v) # km/s # depends on [control=['if'], data=[]]
elif k == 'RV(E)':
self.u_vradial = float(v) #km/s # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if self.ra is None:
raise Exception('no position returned by Simbad for "%s"' % ident) # depends on [control=['if'], data=[]]
if self.u_parallax == 0:
self.u_parallax = None # depends on [control=['if'], data=[]]
if self.u_vradial == 0:
self.u_vradial = None # depends on [control=['if'], data=[]]
# Get the right epoch of position for 2MASS positions
if posref == '2003yCat.2246....0C':
self.pos_epoch = get_2mass_epoch(self.ra, self.dec, debug) # depends on [control=['if'], data=[]]
return self
|
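A quick numeric check of the milliarcsecond-to-radian conversion used for the position uncertainties above, assuming A2R is the arcsecond-to-radian factor:

import math

A2R = math.pi / (180.0 * 3600.0)   # arcsec -> rad (assumed definition)
mas_to_rad = A2R * 1e-3            # as applied to pos_u_maj / pos_u_min
print('%.3e' % mas_to_rad)         # ~4.848e-09 rad per mas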
def disable_validation(self, field_name):
"""Disable the validation rules for a field"""
field = self.field_dict.get(field_name)
if not field:
raise exceptions.FieldNotFound('Field not found: \'%s\' when trying to disable validation' % field_name)
field.validators = []
|
def function[disable_validation, parameter[self, field_name]]:
constant[Disable the validation rules for a field]
variable[field] assign[=] call[name[self].field_dict.get, parameter[name[field_name]]]
if <ast.UnaryOp object at 0x7da1b236ad70> begin[:]
<ast.Raise object at 0x7da1b236bdf0>
name[field].validators assign[=] list[[]]
|
keyword[def] identifier[disable_validation] ( identifier[self] , identifier[field_name] ):
literal[string]
identifier[field] = identifier[self] . identifier[field_dict] . identifier[get] ( identifier[field_name] )
keyword[if] keyword[not] identifier[field] :
keyword[raise] identifier[exceptions] . identifier[FieldNotFound] ( literal[string] % identifier[field_name] )
identifier[field] . identifier[validators] =[]
|
def disable_validation(self, field_name):
"""Disable the validation rules for a field"""
field = self.field_dict.get(field_name)
if not field:
raise exceptions.FieldNotFound("Field not found: '%s' when trying to disable validation" % field_name) # depends on [control=['if'], data=[]]
field.validators = []
|
def enable_pow_mining(chain_class: Type[BaseChain]) -> Type[BaseChain]:
"""
Inject on demand generation of the proof of work mining seal on newly
mined blocks into each of the chain's vms.
"""
if not chain_class.vm_configuration:
raise ValidationError("Chain class has no vm_configuration")
vm_configuration = _mix_in_pow_mining(chain_class.vm_configuration)
return chain_class.configure(vm_configuration=vm_configuration)
|
def function[enable_pow_mining, parameter[chain_class]]:
constant[
Inject on demand generation of the proof of work mining seal on newly
mined blocks into each of the chain's vms.
]
if <ast.UnaryOp object at 0x7da1b1645000> begin[:]
<ast.Raise object at 0x7da1b1646440>
variable[vm_configuration] assign[=] call[name[_mix_in_pow_mining], parameter[name[chain_class].vm_configuration]]
return[call[name[chain_class].configure, parameter[]]]
|
keyword[def] identifier[enable_pow_mining] ( identifier[chain_class] : identifier[Type] [ identifier[BaseChain] ])-> identifier[Type] [ identifier[BaseChain] ]:
literal[string]
keyword[if] keyword[not] identifier[chain_class] . identifier[vm_configuration] :
keyword[raise] identifier[ValidationError] ( literal[string] )
identifier[vm_configuration] = identifier[_mix_in_pow_mining] ( identifier[chain_class] . identifier[vm_configuration] )
keyword[return] identifier[chain_class] . identifier[configure] ( identifier[vm_configuration] = identifier[vm_configuration] )
|
def enable_pow_mining(chain_class: Type[BaseChain]) -> Type[BaseChain]:
"""
Inject on demand generation of the proof of work mining seal on newly
mined blocks into each of the chain's vms.
"""
if not chain_class.vm_configuration:
raise ValidationError('Chain class has no vm_configuration') # depends on [control=['if'], data=[]]
vm_configuration = _mix_in_pow_mining(chain_class.vm_configuration)
return chain_class.configure(vm_configuration=vm_configuration)
|
def release_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
"""
Invoked as :release:`N.N.N <YYYY-MM-DD>`.
Turns into useful release header + link to GH tree for the tag.
"""
# Make sure year has been specified
match = year_arg_re.match(text)
if not match:
msg = inliner.reporter.error("Must specify release date!")
return [inliner.problematic(rawtext, rawtext, msg)], [msg]
number, date = match.group(1), match.group(2)
# Lol @ access back to Sphinx
config = inliner.document.settings.env.app.config
nodelist = [release_nodes(number, number, date, config)]
# Return intermediate node
node = Release(number=number, date=date, nodelist=nodelist)
return [node], []
|
def function[release_role, parameter[name, rawtext, text, lineno, inliner, options, content]]:
constant[
Invoked as :release:`N.N.N <YYYY-MM-DD>`.
Turns into useful release header + link to GH tree for the tag.
]
variable[match] assign[=] call[name[year_arg_re].match, parameter[name[text]]]
if <ast.UnaryOp object at 0x7da1b05bea40> begin[:]
variable[msg] assign[=] call[name[inliner].reporter.error, parameter[constant[Must specify release date!]]]
return[tuple[[<ast.List object at 0x7da1b05bca60>, <ast.List object at 0x7da1b05bf700>]]]
<ast.Tuple object at 0x7da1b05bfeb0> assign[=] tuple[[<ast.Call object at 0x7da1b05bfdf0>, <ast.Call object at 0x7da1b05bf5e0>]]
variable[config] assign[=] name[inliner].document.settings.env.app.config
variable[nodelist] assign[=] list[[<ast.Call object at 0x7da1b05be260>]]
variable[node] assign[=] call[name[Release], parameter[]]
return[tuple[[<ast.List object at 0x7da1b05bca00>, <ast.List object at 0x7da1b05bf4f0>]]]
|
keyword[def] identifier[release_role] ( identifier[name] , identifier[rawtext] , identifier[text] , identifier[lineno] , identifier[inliner] , identifier[options] ={}, identifier[content] =[]):
literal[string]
identifier[match] = identifier[year_arg_re] . identifier[match] ( identifier[text] )
keyword[if] keyword[not] identifier[match] :
identifier[msg] = identifier[inliner] . identifier[reporter] . identifier[error] ( literal[string] )
keyword[return] [ identifier[inliner] . identifier[problematic] ( identifier[rawtext] , identifier[rawtext] , identifier[msg] )],[ identifier[msg] ]
identifier[number] , identifier[date] = identifier[match] . identifier[group] ( literal[int] ), identifier[match] . identifier[group] ( literal[int] )
identifier[config] = identifier[inliner] . identifier[document] . identifier[settings] . identifier[env] . identifier[app] . identifier[config]
identifier[nodelist] =[ identifier[release_nodes] ( identifier[number] , identifier[number] , identifier[date] , identifier[config] )]
identifier[node] = identifier[Release] ( identifier[number] = identifier[number] , identifier[date] = identifier[date] , identifier[nodelist] = identifier[nodelist] )
keyword[return] [ identifier[node] ],[]
|
def release_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
"""
Invoked as :release:`N.N.N <YYYY-MM-DD>`.
Turns into useful release header + link to GH tree for the tag.
"""
# Make sure year has been specified
match = year_arg_re.match(text)
if not match:
msg = inliner.reporter.error('Must specify release date!')
return ([inliner.problematic(rawtext, rawtext, msg)], [msg]) # depends on [control=['if'], data=[]]
(number, date) = (match.group(1), match.group(2))
# Lol @ access back to Sphinx
config = inliner.document.settings.env.app.config
nodelist = [release_nodes(number, number, date, config)]
# Return intermediate node
node = Release(number=number, date=date, nodelist=nodelist)
return ([node], [])
|
def rollback(gandi, resource, background):
""" Rollback a disk from a snapshot. """
result = gandi.disk.rollback(resource, background)
if background:
gandi.pretty_echo(result)
return result
|
def function[rollback, parameter[gandi, resource, background]]:
constant[ Rollback a disk from a snapshot. ]
variable[result] assign[=] call[name[gandi].disk.rollback, parameter[name[resource], name[background]]]
if name[background] begin[:]
call[name[gandi].pretty_echo, parameter[name[result]]]
return[name[result]]
|
keyword[def] identifier[rollback] ( identifier[gandi] , identifier[resource] , identifier[background] ):
literal[string]
identifier[result] = identifier[gandi] . identifier[disk] . identifier[rollback] ( identifier[resource] , identifier[background] )
keyword[if] identifier[background] :
identifier[gandi] . identifier[pretty_echo] ( identifier[result] )
keyword[return] identifier[result]
|
def rollback(gandi, resource, background):
""" Rollback a disk from a snapshot. """
result = gandi.disk.rollback(resource, background)
if background:
gandi.pretty_echo(result) # depends on [control=['if'], data=[]]
return result
|
def upload(self,project, inputtemplate, sourcefile, **kwargs):
"""Alias for ``addinputfile()``"""
return self.addinputfile(project, inputtemplate,sourcefile, **kwargs)
|
def function[upload, parameter[self, project, inputtemplate, sourcefile]]:
constant[Alias for ``addinputfile()``]
return[call[name[self].addinputfile, parameter[name[project], name[inputtemplate], name[sourcefile]]]]
|
keyword[def] identifier[upload] ( identifier[self] , identifier[project] , identifier[inputtemplate] , identifier[sourcefile] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[addinputfile] ( identifier[project] , identifier[inputtemplate] , identifier[sourcefile] ,** identifier[kwargs] )
|
def upload(self, project, inputtemplate, sourcefile, **kwargs):
"""Alias for ``addinputfile()``"""
return self.addinputfile(project, inputtemplate, sourcefile, **kwargs)
|
def jwt_verify_token(headers):
"""Verify the JWT token.
:param dict headers: The request headers.
:returns: The token data.
:rtype: dict
"""
# Get the token from headers
token = headers.get(
current_app.config['OAUTH2SERVER_JWT_AUTH_HEADER']
)
if token is None:
raise JWTInvalidHeaderError
# Get authentication type
authentication_type = \
current_app.config['OAUTH2SERVER_JWT_AUTH_HEADER_TYPE']
# Check if the type should be checked
if authentication_type is not None:
# Get the prefix and the token
prefix, token = token.split()
# Check if the type matches
if prefix != authentication_type:
raise JWTInvalidHeaderError
try:
# Get the token data
decode = jwt_decode_token(token)
# Check the integrity of the user
if current_user.get_id() != decode.get('sub'):
raise JWTInvalidIssuer
return decode
except _JWTDecodeError as exc:
raise_from(JWTDecodeError(), exc)
except _JWTExpiredToken as exc:
raise_from(JWTExpiredToken(), exc)
|
def function[jwt_verify_token, parameter[headers]]:
constant[Verify the JWT token.
:param dict headers: The request headers.
:returns: The token data.
:rtype: dict
]
variable[token] assign[=] call[name[headers].get, parameter[call[name[current_app].config][constant[OAUTH2SERVER_JWT_AUTH_HEADER]]]]
if compare[name[token] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b252b4f0>
variable[authentication_type] assign[=] call[name[current_app].config][constant[OAUTH2SERVER_JWT_AUTH_HEADER_TYPE]]
if compare[name[authentication_type] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da1b25296c0> assign[=] call[name[token].split, parameter[]]
if compare[name[prefix] not_equal[!=] name[authentication_type]] begin[:]
<ast.Raise object at 0x7da1b2528100>
<ast.Try object at 0x7da1b252a320>
|
keyword[def] identifier[jwt_verify_token] ( identifier[headers] ):
literal[string]
identifier[token] = identifier[headers] . identifier[get] (
identifier[current_app] . identifier[config] [ literal[string] ]
)
keyword[if] identifier[token] keyword[is] keyword[None] :
keyword[raise] identifier[JWTInvalidHeaderError]
identifier[authentication_type] = identifier[current_app] . identifier[config] [ literal[string] ]
keyword[if] identifier[authentication_type] keyword[is] keyword[not] keyword[None] :
identifier[prefix] , identifier[token] = identifier[token] . identifier[split] ()
keyword[if] identifier[prefix] != identifier[authentication_type] :
keyword[raise] identifier[JWTInvalidHeaderError]
keyword[try] :
identifier[decode] = identifier[jwt_decode_token] ( identifier[token] )
keyword[if] identifier[current_user] . identifier[get_id] ()!= identifier[decode] . identifier[get] ( literal[string] ):
keyword[raise] identifier[JWTInvalidIssuer]
keyword[return] identifier[decode]
keyword[except] identifier[_JWTDecodeError] keyword[as] identifier[exc] :
identifier[raise_from] ( identifier[JWTDecodeError] (), identifier[exc] )
keyword[except] identifier[_JWTExpiredToken] keyword[as] identifier[exc] :
identifier[raise_from] ( identifier[JWTExpiredToken] (), identifier[exc] )
|
def jwt_verify_token(headers):
"""Verify the JWT token.
:param dict headers: The request headers.
:returns: The token data.
:rtype: dict
"""
# Get the token from headers
token = headers.get(current_app.config['OAUTH2SERVER_JWT_AUTH_HEADER'])
if token is None:
raise JWTInvalidHeaderError # depends on [control=['if'], data=[]]
# Get authentication type
authentication_type = current_app.config['OAUTH2SERVER_JWT_AUTH_HEADER_TYPE']
# Check if the type should be checked
if authentication_type is not None:
# Get the prefix and the token
(prefix, token) = token.split()
# Check if the type matches
if prefix != authentication_type:
raise JWTInvalidHeaderError # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['authentication_type']]
try:
# Get the token data
decode = jwt_decode_token(token)
# Check the integrity of the user
if current_user.get_id() != decode.get('sub'):
raise JWTInvalidIssuer # depends on [control=['if'], data=[]]
return decode # depends on [control=['try'], data=[]]
except _JWTDecodeError as exc:
raise_from(JWTDecodeError(), exc) # depends on [control=['except'], data=['exc']]
except _JWTExpiredToken as exc:
raise_from(JWTExpiredToken(), exc) # depends on [control=['except'], data=['exc']]
|
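A standalone illustration of the header handling above: the configured auth scheme prefix is split off before the JWT is decoded (the values here are illustrative only):

header_value = 'Bearer eyJhbGciOi...'   # hypothetical incoming header value
prefix, raw_jwt = header_value.split()
assert prefix == 'Bearer'               # must match the configured auth header type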
def updateReferencedFile(self, pid, path, name, ref_url):
"""Deprecated, use updateReferenceURL. Update a Referenced Content File (.url)
:param pid: The HydroShare ID of the resource for which the file should be updated
:param path: Folder path for the file to be updated in
:param name: Filename for the referenced file
:param ref_url: url to be updated in the referenced file
:return: JsonResponse on success or HttpResponse with error status code on error
:raises: HydroShareNotAuthorized if user is not authorized to perform action.
:raises: HydroShareNotFound if the resource or resource file was not found.
:raises: HydroShareHTTPException if an unexpected HTTP response code is encountered.
"""
url = "{url_base}/resource/data_store_edit_reference_url/".format(url_base=self.url_base)
data = {'res_id': pid, 'curr_path': path, 'url_filename': name, 'new_ref_url': ref_url}
r = self._request('POST', url, data=data)
if r.status_code != 200:
if r.status_code == 403:
raise HydroShareNotAuthorized(('POST', url))
elif r.status_code == 404:
raise HydroShareNotFound((pid,))
else:
raise HydroShareHTTPException((url, 'POST', r.status_code))
return r.json()
|
def function[updateReferencedFile, parameter[self, pid, path, name, ref_url]]:
constant[Deprecated, use updateReferenceURL. Update a Referenced Content File (.url)
:param pid: The HydroShare ID of the resource for which the file should be updated
:param path: Folder path for the file to be updated in
:param name: Filename for the referenced file
:param ref_url: url to be updated in the referenced file
:return: JsonResponse on success or HttpResponse with error status code on error
:raises: HydroShareNotAuthorized if user is not authorized to perform action.
:raises: HydroShareNotFound if the resource or resource file was not found.
:raises: HydroShareHTTPException if an unexpected HTTP response code is encountered.
]
variable[url] assign[=] call[constant[{url_base}/resource/data_store_edit_reference_url/].format, parameter[]]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b2589de0>, <ast.Constant object at 0x7da1b2589a20>, <ast.Constant object at 0x7da1b2589ea0>, <ast.Constant object at 0x7da1b258b550>], [<ast.Name object at 0x7da1b258b0d0>, <ast.Name object at 0x7da1b2589ae0>, <ast.Name object at 0x7da1b2589c00>, <ast.Name object at 0x7da1b2589510>]]
variable[r] assign[=] call[name[self]._request, parameter[constant[POST], name[url]]]
if compare[name[r].status_code not_equal[!=] constant[200]] begin[:]
if compare[name[r].status_code equal[==] constant[403]] begin[:]
<ast.Raise object at 0x7da1b258a620>
return[call[name[r].json, parameter[]]]
|
keyword[def] identifier[updateReferencedFile] ( identifier[self] , identifier[pid] , identifier[path] , identifier[name] , identifier[ref_url] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[url_base] = identifier[self] . identifier[url_base] )
identifier[data] ={ literal[string] : identifier[pid] , literal[string] : identifier[path] , literal[string] : identifier[name] , literal[string] : identifier[ref_url] }
identifier[r] = identifier[self] . identifier[_request] ( literal[string] , identifier[url] , identifier[data] = identifier[data] )
keyword[if] identifier[r] . identifier[status_code] != literal[int] :
keyword[if] identifier[r] . identifier[status_code] == literal[int] :
keyword[raise] identifier[HydroShareNotAuthorized] (( literal[string] , identifier[url] ))
keyword[elif] identifier[r] . identifier[status_code] == literal[int] :
keyword[raise] identifier[HydroShareNotFound] (( identifier[pid] ,))
keyword[else] :
keyword[raise] identifier[HydroShareHTTPException] (( identifier[url] , literal[string] , identifier[r] . identifier[status_code] ))
keyword[return] identifier[r] . identifier[json] ()
|
def updateReferencedFile(self, pid, path, name, ref_url):
"""Deprecated, use updateReferenceURL. Update a Referenced Content File (.url)
:param pid: The HydroShare ID of the resource for which the file should be updated
:param path: Folder path for the file to be updated in
:param name: Filename for the referenced file
:param ref_url: url to be updated in the referenced file
:return: JsonResponse on success or HttpResponse with error status code on error
:raises: HydroShareNotAuthorized if user is not authorized to perform action.
:raises: HydroShareNotFound if the resource or resource file was not found.
:raises: HydroShareHTTPException if an unexpected HTTP response code is encountered.
"""
url = '{url_base}/resource/data_store_edit_reference_url/'.format(url_base=self.url_base)
data = {'res_id': pid, 'curr_path': path, 'url_filename': name, 'new_ref_url': ref_url}
r = self._request('POST', url, data=data)
if r.status_code != 200:
if r.status_code == 403:
raise HydroShareNotAuthorized(('POST', url)) # depends on [control=['if'], data=[]]
elif r.status_code == 404:
raise HydroShareNotFound((pid,)) # depends on [control=['if'], data=[]]
else:
raise HydroShareHTTPException((url, 'POST', r.status_code)) # depends on [control=['if'], data=[]]
return r.json()
|
def delete_profile(hostname, username, password, profile_type, name):
'''
A function to connect to a bigip device and delete an existing profile.
hostname
The host/address of the bigip device
username
The iControl REST username
password
The iControl REST password
profile_type
The type of profile to delete
name
The name of the profile to delete
CLI Example::
salt '*' bigip.delete_profile bigip admin admin http my-http-profile
'''
#build sessions
bigip_session = _build_session(username, password)
#delete to REST
try:
response = bigip_session.delete(BIG_IP_URL_BASE.format(host=hostname)+'/ltm/profile/{type}/{name}'.format(type=profile_type, name=name))
except requests.exceptions.ConnectionError as e:
return _load_connection_error(hostname, e)
if _load_response(response) == '':
return True
else:
return _load_response(response)
|
def function[delete_profile, parameter[hostname, username, password, profile_type, name]]:
constant[
A function to connect to a bigip device and delete an existing profile.
hostname
The host/address of the bigip device
username
The iControl REST username
password
The iControl REST password
profile_type
The type of profile to delete
name
The name of the profile to delete
CLI Example::
salt '*' bigip.delete_profile bigip admin admin http my-http-profile
]
variable[bigip_session] assign[=] call[name[_build_session], parameter[name[username], name[password]]]
<ast.Try object at 0x7da204620640>
if compare[call[name[_load_response], parameter[name[response]]] equal[==] constant[]] begin[:]
return[constant[True]]
|
keyword[def] identifier[delete_profile] ( identifier[hostname] , identifier[username] , identifier[password] , identifier[profile_type] , identifier[name] ):
literal[string]
identifier[bigip_session] = identifier[_build_session] ( identifier[username] , identifier[password] )
keyword[try] :
identifier[response] = identifier[bigip_session] . identifier[delete] ( identifier[BIG_IP_URL_BASE] . identifier[format] ( identifier[host] = identifier[hostname] )+ literal[string] . identifier[format] ( identifier[type] = identifier[profile_type] , identifier[name] = identifier[name] ))
keyword[except] identifier[requests] . identifier[exceptions] . identifier[ConnectionError] keyword[as] identifier[e] :
keyword[return] identifier[_load_connection_error] ( identifier[hostname] , identifier[e] )
keyword[if] identifier[_load_response] ( identifier[response] )== literal[string] :
keyword[return] keyword[True]
keyword[else] :
keyword[return] identifier[_load_response] ( identifier[response] )
|
def delete_profile(hostname, username, password, profile_type, name):
"""
A function to connect to a bigip device and delete an existing profile.
hostname
The host/address of the bigip device
username
The iControl REST username
password
The iControl REST password
profile_type
The type of profile to delete
name
The name of the profile to delete
CLI Example::
salt '*' bigip.delete_profile bigip admin admin http my-http-profile
"""
#build sessions
bigip_session = _build_session(username, password)
#delete to REST
try:
response = bigip_session.delete(BIG_IP_URL_BASE.format(host=hostname) + '/ltm/profile/{type}/{name}'.format(type=profile_type, name=name)) # depends on [control=['try'], data=[]]
except requests.exceptions.ConnectionError as e:
return _load_connection_error(hostname, e) # depends on [control=['except'], data=['e']]
if _load_response(response) == '':
return True # depends on [control=['if'], data=[]]
else:
return _load_response(response)
|
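A minimal standalone sketch of the same iControl REST DELETE call, assuming a reachable BIG-IP at a hypothetical hostname and the usual /mgmt/tm URL base; it mirrors the session/delete/response-check flow above without the Salt module helpers (_build_session, _load_response).

import requests

def delete_profile_sketch(hostname, username, password, profile_type, name):
    # build an authenticated session, as _build_session does in the module
    session = requests.Session()
    session.auth = (username, password)
    session.verify = False  # BIG-IP appliances commonly ship self-signed certs
    url = 'https://{host}/mgmt/tm/ltm/profile/{type}/{name}'.format(
        host=hostname, type=profile_type, name=name)
    try:
        response = session.delete(url)
    except requests.exceptions.ConnectionError as exc:
        return 'Could not connect to {0}: {1}'.format(hostname, exc)
    # an empty body signals success, matching the module's _load_response check
    return True if response.text == '' else response.text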
def get_references(basis_name, elements=None, version=None, fmt=None, data_dir=None):
'''Get the references/citations for a basis set
Parameters
----------
basis_name : str
Name of the basis set. This is not case sensitive.
elements : list
List of element numbers that you want the basis set for. By default,
all elements for which the basis set is defined are included.
version : int
Obtain a specific version of this basis set. By default,
the latest version is returned.
fmt: str
The desired output format of the basis set references. By default,
basis set information is returned as a list of dictionaries. Use
get_reference_formats() to programmatically obtain the available formats.
The `fmt` argument is not case sensitive.
Available reference formats are
* bib
* txt
* json
data_dir : str
Data directory with all the basis set information. By default,
it is in the 'data' subdirectory of this project.
Returns
-------
str or dict
The references for the given basis set in the desired format. If `fmt` is **None**, this will be a python
dictionary. Otherwise, it will be a string.
'''
data_dir = fix_data_dir(data_dir)
basis_dict = get_basis(basis_name, elements=elements, version=version, data_dir=data_dir)
all_ref_data = get_reference_data(data_dir)
ref_data = references.compact_references(basis_dict, all_ref_data)
if fmt is None:
return ref_data
return refconverters.convert_references(ref_data, fmt)
|
def function[get_references, parameter[basis_name, elements, version, fmt, data_dir]]:
constant[Get the references/citations for a basis set
Parameters
----------
basis_name : str
Name of the basis set. This is not case sensitive.
elements : list
List of element numbers that you want the basis set for. By default,
all elements for which the basis set is defined are included.
version : int
Obtain a specific version of this basis set. By default,
the latest version is returned.
fmt: str
The desired output format of the basis set references. By default,
basis set information is returned as a list of dictionaries. Use
get_reference_formats() to programmatically obtain the available formats.
The `fmt` argument is not case sensitive.
Available reference formats are
* bib
* txt
* json
data_dir : str
Data directory with all the basis set information. By default,
it is in the 'data' subdirectory of this project.
Returns
-------
str or dict
The references for the given basis set in the desired format. If `fmt` is **None**, this will be a python
dictionary. Otherwise, it will be a string.
]
variable[data_dir] assign[=] call[name[fix_data_dir], parameter[name[data_dir]]]
variable[basis_dict] assign[=] call[name[get_basis], parameter[name[basis_name]]]
variable[all_ref_data] assign[=] call[name[get_reference_data], parameter[name[data_dir]]]
variable[ref_data] assign[=] call[name[references].compact_references, parameter[name[basis_dict], name[all_ref_data]]]
if compare[name[fmt] is constant[None]] begin[:]
return[name[ref_data]]
return[call[name[refconverters].convert_references, parameter[name[ref_data], name[fmt]]]]
|
keyword[def] identifier[get_references] ( identifier[basis_name] , identifier[elements] = keyword[None] , identifier[version] = keyword[None] , identifier[fmt] = keyword[None] , identifier[data_dir] = keyword[None] ):
literal[string]
identifier[data_dir] = identifier[fix_data_dir] ( identifier[data_dir] )
identifier[basis_dict] = identifier[get_basis] ( identifier[basis_name] , identifier[elements] = identifier[elements] , identifier[version] = identifier[version] , identifier[data_dir] = identifier[data_dir] )
identifier[all_ref_data] = identifier[get_reference_data] ( identifier[data_dir] )
identifier[ref_data] = identifier[references] . identifier[compact_references] ( identifier[basis_dict] , identifier[all_ref_data] )
keyword[if] identifier[fmt] keyword[is] keyword[None] :
keyword[return] identifier[ref_data]
keyword[return] identifier[refconverters] . identifier[convert_references] ( identifier[ref_data] , identifier[fmt] )
|
def get_references(basis_name, elements=None, version=None, fmt=None, data_dir=None):
"""Get the references/citations for a basis set
Parameters
----------
basis_name : str
Name of the basis set. This is not case sensitive.
elements : list
List of element numbers that you want the basis set for. By default,
all elements for which the basis set is defined are included.
version : int
Obtain a specific version of this basis set. By default,
the latest version is returned.
fmt: str
The desired output format of the basis set references. By default,
basis set information is returned as a list of dictionaries. Use
get_reference_formats() to programmatically obtain the available formats.
The `fmt` argument is not case sensitive.
Available reference formats are
* bib
* txt
* json
data_dir : str
Data directory with all the basis set information. By default,
it is in the 'data' subdirectory of this project.
Returns
-------
str or dict
The references for the given basis set in the desired format. If `fmt` is **None**, this will be a python
dictionary. Otherwise, it will be a string.
"""
data_dir = fix_data_dir(data_dir)
basis_dict = get_basis(basis_name, elements=elements, version=version, data_dir=data_dir)
all_ref_data = get_reference_data(data_dir)
ref_data = references.compact_references(basis_dict, all_ref_data)
if fmt is None:
return ref_data # depends on [control=['if'], data=[]]
return refconverters.convert_references(ref_data, fmt)
|
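Hedged usage sketch, assuming this function is the get_references exposed at the top level of the basis_set_exchange package; the basis set name and element list are illustrative.

import basis_set_exchange as bse

# default fmt=None returns the reference data as plain python structures
ref_data = bse.get_references('cc-pvdz', elements=[1, 8])

# the same citations rendered as a BibTeX string
print(bse.get_references('cc-pvdz', elements=[1, 8], fmt='bib'))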
def set_(key, value, service=None, profile=None):
'''
Set a key/value pair in a keyring service
'''
service = _get_service(service, profile)
keyring.set_password(service, key, value)
|
def function[set_, parameter[key, value, service, profile]]:
constant[
Set a key/value pair in a keyring service
]
variable[service] assign[=] call[name[_get_service], parameter[name[service], name[profile]]]
call[name[keyring].set_password, parameter[name[service], name[key], name[value]]]
|
keyword[def] identifier[set_] ( identifier[key] , identifier[value] , identifier[service] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
identifier[service] = identifier[_get_service] ( identifier[service] , identifier[profile] )
identifier[keyring] . identifier[set_password] ( identifier[service] , identifier[key] , identifier[value] )
|
def set_(key, value, service=None, profile=None):
"""
Set a key/value pair in a keyring service
"""
service = _get_service(service, profile)
keyring.set_password(service, key, value)
|
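For illustration, once the service name is resolved the call made above is just keyring.set_password; a standalone sketch using the keyring package directly (service and key names are made up):

import keyring

keyring.set_password('my-service', 'api_user', 's3cr3t')
print(keyring.get_password('my-service', 'api_user'))  # -> s3cr3t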
def load_single_dict(pinyin_dict, style='default'):
"""载入用户自定义的单字拼音库
:param pinyin_dict: 单字拼音库。比如: ``{0x963F: u"ā,ē"}``
:param style: pinyin_dict 参数值的拼音库风格. 支持 'default', 'tone2'
:type pinyin_dict: dict
"""
if style == 'tone2':
for k, v in pinyin_dict.items():
v = _replace_tone2_style_dict_to_default(v)
PINYIN_DICT[k] = v
else:
PINYIN_DICT.update(pinyin_dict)
mmseg.retrain(mmseg.seg)
|
def function[load_single_dict, parameter[pinyin_dict, style]]:
    constant[Load a user-defined single-character pinyin dictionary.
    :param pinyin_dict: single-character pinyin dictionary, e.g. ``{0x963F: u"ā,ē"}``
    :param style: pinyin style used by the values of pinyin_dict; supports 'default', 'tone2'
:type pinyin_dict: dict
]
if compare[name[style] equal[==] constant[tone2]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1b63070>, <ast.Name object at 0x7da1b1b62e90>]]] in starred[call[name[pinyin_dict].items, parameter[]]] begin[:]
variable[v] assign[=] call[name[_replace_tone2_style_dict_to_default], parameter[name[v]]]
call[name[PINYIN_DICT]][name[k]] assign[=] name[v]
call[name[mmseg].retrain, parameter[name[mmseg].seg]]
|
keyword[def] identifier[load_single_dict] ( identifier[pinyin_dict] , identifier[style] = literal[string] ):
literal[string]
keyword[if] identifier[style] == literal[string] :
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[pinyin_dict] . identifier[items] ():
identifier[v] = identifier[_replace_tone2_style_dict_to_default] ( identifier[v] )
identifier[PINYIN_DICT] [ identifier[k] ]= identifier[v]
keyword[else] :
identifier[PINYIN_DICT] . identifier[update] ( identifier[pinyin_dict] )
identifier[mmseg] . identifier[retrain] ( identifier[mmseg] . identifier[seg] )
|
def load_single_dict(pinyin_dict, style='default'):
"""载入用户自定义的单字拼音库
:param pinyin_dict: 单字拼音库。比如: ``{0x963F: u"ā,ē"}``
:param style: pinyin_dict 参数值的拼音库风格. 支持 'default', 'tone2'
:type pinyin_dict: dict
"""
if style == 'tone2':
for (k, v) in pinyin_dict.items():
v = _replace_tone2_style_dict_to_default(v)
PINYIN_DICT[k] = v # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
PINYIN_DICT.update(pinyin_dict)
mmseg.retrain(mmseg.seg)
|
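Usage sketch, assuming this is the load_single_dict exposed by the pypinyin package; the single-character override below is purely illustrative.

from pypinyin import lazy_pinyin, load_single_dict

# override the reading of U+963F ("阿") using the default style
load_single_dict({0x963F: u'ā,ē'})
print(lazy_pinyin(u'阿'))  # -> ['a']

# the same override supplied in tone2 style is converted to the default style internally
load_single_dict({0x963F: u'a1,e1'}, style='tone2')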
def main(device_type):
"""Run ssh-agent using given hardware client factory."""
args = create_agent_parser(device_type=device_type).parse_args()
util.setup_logging(verbosity=args.verbose, filename=args.log_file)
public_keys = None
filename = None
if args.identity.startswith('/'):
filename = args.identity
contents = open(filename, 'rb').read().decode('utf-8')
# Allow loading previously exported SSH public keys
if filename.endswith('.pub'):
public_keys = list(import_public_keys(contents))
identities = list(parse_config(contents))
else:
identities = [device.interface.Identity(
identity_str=args.identity, curve_name=args.ecdsa_curve_name)]
for index, identity in enumerate(identities):
identity.identity_dict['proto'] = u'ssh'
log.info('identity #%d: %s', index, identity.to_string())
# override default PIN/passphrase entry tools (relevant for TREZOR/Keepkey):
device_type.ui = device.ui.UI(device_type=device_type, config=vars(args))
device_type.ui.cached_passphrase_ack = util.ExpiringCache(
args.cache_expiry_seconds)
conn = JustInTimeConnection(
conn_factory=lambda: client.Client(device_type()),
identities=identities, public_keys=public_keys)
sock_path = _get_sock_path(args)
command = args.command
context = _dummy_context()
if args.connect:
command = ['ssh'] + ssh_args(conn) + args.command
elif args.mosh:
command = ['mosh'] + mosh_args(conn) + args.command
elif args.daemonize:
out = 'SSH_AUTH_SOCK={0}; export SSH_AUTH_SOCK;\n'.format(sock_path)
sys.stdout.write(out)
sys.stdout.flush()
context = daemon.DaemonContext()
log.info('running the agent as a daemon on %s', sock_path)
elif args.foreground:
log.info('running the agent on %s', sock_path)
use_shell = bool(args.shell)
if use_shell:
command = os.environ['SHELL']
sys.stdin.close()
if command or args.daemonize or args.foreground:
with context:
return run_server(conn=conn, command=command, sock_path=sock_path,
debug=args.debug, timeout=args.timeout)
else:
for pk in conn.public_keys():
sys.stdout.write(pk)
return 0
|
def function[main, parameter[device_type]]:
constant[Run ssh-agent using given hardware client factory.]
variable[args] assign[=] call[call[name[create_agent_parser], parameter[]].parse_args, parameter[]]
call[name[util].setup_logging, parameter[]]
variable[public_keys] assign[=] constant[None]
variable[filename] assign[=] constant[None]
if call[name[args].identity.startswith, parameter[constant[/]]] begin[:]
variable[filename] assign[=] name[args].identity
variable[contents] assign[=] call[call[call[name[open], parameter[name[filename], constant[rb]]].read, parameter[]].decode, parameter[constant[utf-8]]]
if call[name[filename].endswith, parameter[constant[.pub]]] begin[:]
variable[public_keys] assign[=] call[name[list], parameter[call[name[import_public_keys], parameter[name[contents]]]]]
variable[identities] assign[=] call[name[list], parameter[call[name[parse_config], parameter[name[contents]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1253af0>, <ast.Name object at 0x7da1b1251960>]]] in starred[call[name[enumerate], parameter[name[identities]]]] begin[:]
call[name[identity].identity_dict][constant[proto]] assign[=] constant[ssh]
call[name[log].info, parameter[constant[identity #%d: %s], name[index], call[name[identity].to_string, parameter[]]]]
name[device_type].ui assign[=] call[name[device].ui.UI, parameter[]]
name[device_type].ui.cached_passphrase_ack assign[=] call[name[util].ExpiringCache, parameter[name[args].cache_expiry_seconds]]
variable[conn] assign[=] call[name[JustInTimeConnection], parameter[]]
variable[sock_path] assign[=] call[name[_get_sock_path], parameter[name[args]]]
variable[command] assign[=] name[args].command
variable[context] assign[=] call[name[_dummy_context], parameter[]]
if name[args].connect begin[:]
variable[command] assign[=] binary_operation[binary_operation[list[[<ast.Constant object at 0x7da1b12daa40>]] + call[name[ssh_args], parameter[name[conn]]]] + name[args].command]
variable[use_shell] assign[=] call[name[bool], parameter[name[args].shell]]
if name[use_shell] begin[:]
variable[command] assign[=] call[name[os].environ][constant[SHELL]]
call[name[sys].stdin.close, parameter[]]
if <ast.BoolOp object at 0x7da1b1213d30> begin[:]
with name[context] begin[:]
return[call[name[run_server], parameter[]]]
|
keyword[def] identifier[main] ( identifier[device_type] ):
literal[string]
identifier[args] = identifier[create_agent_parser] ( identifier[device_type] = identifier[device_type] ). identifier[parse_args] ()
identifier[util] . identifier[setup_logging] ( identifier[verbosity] = identifier[args] . identifier[verbose] , identifier[filename] = identifier[args] . identifier[log_file] )
identifier[public_keys] = keyword[None]
identifier[filename] = keyword[None]
keyword[if] identifier[args] . identifier[identity] . identifier[startswith] ( literal[string] ):
identifier[filename] = identifier[args] . identifier[identity]
identifier[contents] = identifier[open] ( identifier[filename] , literal[string] ). identifier[read] (). identifier[decode] ( literal[string] )
keyword[if] identifier[filename] . identifier[endswith] ( literal[string] ):
identifier[public_keys] = identifier[list] ( identifier[import_public_keys] ( identifier[contents] ))
identifier[identities] = identifier[list] ( identifier[parse_config] ( identifier[contents] ))
keyword[else] :
identifier[identities] =[ identifier[device] . identifier[interface] . identifier[Identity] (
identifier[identity_str] = identifier[args] . identifier[identity] , identifier[curve_name] = identifier[args] . identifier[ecdsa_curve_name] )]
keyword[for] identifier[index] , identifier[identity] keyword[in] identifier[enumerate] ( identifier[identities] ):
identifier[identity] . identifier[identity_dict] [ literal[string] ]= literal[string]
identifier[log] . identifier[info] ( literal[string] , identifier[index] , identifier[identity] . identifier[to_string] ())
identifier[device_type] . identifier[ui] = identifier[device] . identifier[ui] . identifier[UI] ( identifier[device_type] = identifier[device_type] , identifier[config] = identifier[vars] ( identifier[args] ))
identifier[device_type] . identifier[ui] . identifier[cached_passphrase_ack] = identifier[util] . identifier[ExpiringCache] (
identifier[args] . identifier[cache_expiry_seconds] )
identifier[conn] = identifier[JustInTimeConnection] (
identifier[conn_factory] = keyword[lambda] : identifier[client] . identifier[Client] ( identifier[device_type] ()),
identifier[identities] = identifier[identities] , identifier[public_keys] = identifier[public_keys] )
identifier[sock_path] = identifier[_get_sock_path] ( identifier[args] )
identifier[command] = identifier[args] . identifier[command]
identifier[context] = identifier[_dummy_context] ()
keyword[if] identifier[args] . identifier[connect] :
identifier[command] =[ literal[string] ]+ identifier[ssh_args] ( identifier[conn] )+ identifier[args] . identifier[command]
keyword[elif] identifier[args] . identifier[mosh] :
identifier[command] =[ literal[string] ]+ identifier[mosh_args] ( identifier[conn] )+ identifier[args] . identifier[command]
keyword[elif] identifier[args] . identifier[daemonize] :
identifier[out] = literal[string] . identifier[format] ( identifier[sock_path] )
identifier[sys] . identifier[stdout] . identifier[write] ( identifier[out] )
identifier[sys] . identifier[stdout] . identifier[flush] ()
identifier[context] = identifier[daemon] . identifier[DaemonContext] ()
identifier[log] . identifier[info] ( literal[string] , identifier[sock_path] )
keyword[elif] identifier[args] . identifier[foreground] :
identifier[log] . identifier[info] ( literal[string] , identifier[sock_path] )
identifier[use_shell] = identifier[bool] ( identifier[args] . identifier[shell] )
keyword[if] identifier[use_shell] :
identifier[command] = identifier[os] . identifier[environ] [ literal[string] ]
identifier[sys] . identifier[stdin] . identifier[close] ()
keyword[if] identifier[command] keyword[or] identifier[args] . identifier[daemonize] keyword[or] identifier[args] . identifier[foreground] :
keyword[with] identifier[context] :
keyword[return] identifier[run_server] ( identifier[conn] = identifier[conn] , identifier[command] = identifier[command] , identifier[sock_path] = identifier[sock_path] ,
identifier[debug] = identifier[args] . identifier[debug] , identifier[timeout] = identifier[args] . identifier[timeout] )
keyword[else] :
keyword[for] identifier[pk] keyword[in] identifier[conn] . identifier[public_keys] ():
identifier[sys] . identifier[stdout] . identifier[write] ( identifier[pk] )
keyword[return] literal[int]
|
def main(device_type):
"""Run ssh-agent using given hardware client factory."""
args = create_agent_parser(device_type=device_type).parse_args()
util.setup_logging(verbosity=args.verbose, filename=args.log_file)
public_keys = None
filename = None
if args.identity.startswith('/'):
filename = args.identity
contents = open(filename, 'rb').read().decode('utf-8')
# Allow loading previously exported SSH public keys
if filename.endswith('.pub'):
public_keys = list(import_public_keys(contents)) # depends on [control=['if'], data=[]]
identities = list(parse_config(contents)) # depends on [control=['if'], data=[]]
else:
identities = [device.interface.Identity(identity_str=args.identity, curve_name=args.ecdsa_curve_name)]
for (index, identity) in enumerate(identities):
identity.identity_dict['proto'] = u'ssh'
log.info('identity #%d: %s', index, identity.to_string()) # depends on [control=['for'], data=[]]
# override default PIN/passphrase entry tools (relevant for TREZOR/Keepkey):
device_type.ui = device.ui.UI(device_type=device_type, config=vars(args))
device_type.ui.cached_passphrase_ack = util.ExpiringCache(args.cache_expiry_seconds)
conn = JustInTimeConnection(conn_factory=lambda : client.Client(device_type()), identities=identities, public_keys=public_keys)
sock_path = _get_sock_path(args)
command = args.command
context = _dummy_context()
if args.connect:
command = ['ssh'] + ssh_args(conn) + args.command # depends on [control=['if'], data=[]]
elif args.mosh:
command = ['mosh'] + mosh_args(conn) + args.command # depends on [control=['if'], data=[]]
elif args.daemonize:
out = 'SSH_AUTH_SOCK={0}; export SSH_AUTH_SOCK;\n'.format(sock_path)
sys.stdout.write(out)
sys.stdout.flush()
context = daemon.DaemonContext()
log.info('running the agent as a daemon on %s', sock_path) # depends on [control=['if'], data=[]]
elif args.foreground:
log.info('running the agent on %s', sock_path) # depends on [control=['if'], data=[]]
use_shell = bool(args.shell)
if use_shell:
command = os.environ['SHELL']
sys.stdin.close() # depends on [control=['if'], data=[]]
if command or args.daemonize or args.foreground:
with context:
return run_server(conn=conn, command=command, sock_path=sock_path, debug=args.debug, timeout=args.timeout) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
else:
for pk in conn.public_keys():
sys.stdout.write(pk) # depends on [control=['for'], data=['pk']]
return 0
|
def unpack_types(types, args, argnames, major):
"""Parse arguments according to types list.
Parameters
----------
types : list of kattypes
The types of the arguments (in order).
args : list of strings
The arguments to parse.
argnames : list of strings
The names of the arguments.
major : integer
Major version of KATCP to use when packing types
"""
if len(types) > 0:
multiple = types[-1]._multiple
else:
multiple = False
if len(types) < len(args) and not multiple:
raise FailReply("Too many parameters given.")
# Wrap the types in parameter objects
params = []
for i, kattype in enumerate(types):
name = ""
if i < len(argnames):
name = argnames[i]
params.append(Parameter(i+1, name, kattype, major))
if len(args) > len(types) and multiple:
for i in range(len(types), len(args)):
params.append(Parameter(i+1, name, kattype, major))
# if len(args) < len(types) this passes in None for missing args
return map(lambda param, arg: param.unpack(arg), params, args)
|
def function[unpack_types, parameter[types, args, argnames, major]]:
constant[Parse arguments according to types list.
Parameters
----------
types : list of kattypes
The types of the arguments (in order).
args : list of strings
The arguments to parse.
argnames : list of strings
The names of the arguments.
major : integer
Major version of KATCP to use when packing types
]
if compare[call[name[len], parameter[name[types]]] greater[>] constant[0]] begin[:]
variable[multiple] assign[=] call[name[types]][<ast.UnaryOp object at 0x7da1b0431e10>]._multiple
if <ast.BoolOp object at 0x7da1b04310c0> begin[:]
<ast.Raise object at 0x7da1b0430d30>
variable[params] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b04904f0>, <ast.Name object at 0x7da1b04937c0>]]] in starred[call[name[enumerate], parameter[name[types]]]] begin[:]
variable[name] assign[=] constant[]
if compare[name[i] less[<] call[name[len], parameter[name[argnames]]]] begin[:]
variable[name] assign[=] call[name[argnames]][name[i]]
call[name[params].append, parameter[call[name[Parameter], parameter[binary_operation[name[i] + constant[1]], name[name], name[kattype], name[major]]]]]
if <ast.BoolOp object at 0x7da1b052b8b0> begin[:]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[types]]], call[name[len], parameter[name[args]]]]]] begin[:]
call[name[params].append, parameter[call[name[Parameter], parameter[binary_operation[name[i] + constant[1]], name[name], name[kattype], name[major]]]]]
return[call[name[map], parameter[<ast.Lambda object at 0x7da1b0529ff0>, name[params], name[args]]]]
|
keyword[def] identifier[unpack_types] ( identifier[types] , identifier[args] , identifier[argnames] , identifier[major] ):
literal[string]
keyword[if] identifier[len] ( identifier[types] )> literal[int] :
identifier[multiple] = identifier[types] [- literal[int] ]. identifier[_multiple]
keyword[else] :
identifier[multiple] = keyword[False]
keyword[if] identifier[len] ( identifier[types] )< identifier[len] ( identifier[args] ) keyword[and] keyword[not] identifier[multiple] :
keyword[raise] identifier[FailReply] ( literal[string] )
identifier[params] =[]
keyword[for] identifier[i] , identifier[kattype] keyword[in] identifier[enumerate] ( identifier[types] ):
identifier[name] = literal[string]
keyword[if] identifier[i] < identifier[len] ( identifier[argnames] ):
identifier[name] = identifier[argnames] [ identifier[i] ]
identifier[params] . identifier[append] ( identifier[Parameter] ( identifier[i] + literal[int] , identifier[name] , identifier[kattype] , identifier[major] ))
keyword[if] identifier[len] ( identifier[args] )> identifier[len] ( identifier[types] ) keyword[and] identifier[multiple] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[types] ), identifier[len] ( identifier[args] )):
identifier[params] . identifier[append] ( identifier[Parameter] ( identifier[i] + literal[int] , identifier[name] , identifier[kattype] , identifier[major] ))
keyword[return] identifier[map] ( keyword[lambda] identifier[param] , identifier[arg] : identifier[param] . identifier[unpack] ( identifier[arg] ), identifier[params] , identifier[args] )
|
def unpack_types(types, args, argnames, major):
"""Parse arguments according to types list.
Parameters
----------
types : list of kattypes
The types of the arguments (in order).
args : list of strings
The arguments to parse.
argnames : list of strings
The names of the arguments.
major : integer
Major version of KATCP to use when packing types
"""
if len(types) > 0:
multiple = types[-1]._multiple # depends on [control=['if'], data=[]]
else:
multiple = False
if len(types) < len(args) and (not multiple):
raise FailReply('Too many parameters given.') # depends on [control=['if'], data=[]]
# Wrap the types in parameter objects
params = []
for (i, kattype) in enumerate(types):
name = ''
if i < len(argnames):
name = argnames[i] # depends on [control=['if'], data=['i']]
params.append(Parameter(i + 1, name, kattype, major)) # depends on [control=['for'], data=[]]
if len(args) > len(types) and multiple:
for i in range(len(types), len(args)):
params.append(Parameter(i + 1, name, kattype, major)) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
# if len(args) < len(types) this passes in None for missing args
return map(lambda param, arg: param.unpack(arg), params, args)
|
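A self-contained illustration of the pairing rule implemented above: arguments are matched positionally with types, and when the final type accepts multiple values it is reused for any extra arguments (the type names and boolean flag below stand in for the kattype objects).

def pair_types(type_names, args, last_is_multiple):
    # positional pairing, then reuse the last type for any surplus arguments
    pairs = list(zip(type_names, args))
    if last_is_multiple and len(args) > len(type_names):
        pairs += [(type_names[-1], arg) for arg in args[len(type_names):]]
    return pairs

print(pair_types(['Int', 'Str'], ['1', 'a', 'b', 'c'], last_is_multiple=True))
# -> [('Int', '1'), ('Str', 'a'), ('Str', 'b'), ('Str', 'c')]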
def start(self, join=False):
"""
        when calling with join=True, this must be called in the main thread, or else the Keyboard Interrupt won't be captured.
        :param join: default False; whether to block until the worker is stopped by Ctrl+C or other reasons.
:return:
"""
Thread.start(self)
if join:
try:
while self.is_alive():
self.join(timeout=60)
self.logger.info("worker {0} exit unexpected, try to shutdown it".format(self.option.consumer_name))
self.shutdown()
except KeyboardInterrupt:
self.logger.info("*** try to exit **** ")
self.shutdown()
|
def function[start, parameter[self, join]]:
constant[
        when calling with join=True, this must be called in the main thread, or else the Keyboard Interrupt won't be captured.
        :param join: default False; whether to block until the worker is stopped by Ctrl+C or other reasons.
:return:
]
call[name[Thread].start, parameter[name[self]]]
if name[join] begin[:]
<ast.Try object at 0x7da1b08a30a0>
|
keyword[def] identifier[start] ( identifier[self] , identifier[join] = keyword[False] ):
literal[string]
identifier[Thread] . identifier[start] ( identifier[self] )
keyword[if] identifier[join] :
keyword[try] :
keyword[while] identifier[self] . identifier[is_alive] ():
identifier[self] . identifier[join] ( identifier[timeout] = literal[int] )
identifier[self] . identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[self] . identifier[option] . identifier[consumer_name] ))
identifier[self] . identifier[shutdown] ()
keyword[except] identifier[KeyboardInterrupt] :
identifier[self] . identifier[logger] . identifier[info] ( literal[string] )
identifier[self] . identifier[shutdown] ()
|
def start(self, join=False):
"""
        when calling with join=True, this must be called in the main thread, or else the Keyboard Interrupt won't be captured.
        :param join: default False; whether to block until the worker is stopped by Ctrl+C or other reasons.
:return:
"""
Thread.start(self)
if join:
try:
while self.is_alive():
self.join(timeout=60) # depends on [control=['while'], data=[]]
self.logger.info('worker {0} exit unexpected, try to shutdown it'.format(self.option.consumer_name))
self.shutdown() # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
self.logger.info('*** try to exit **** ')
self.shutdown() # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
|
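A standalone sketch of the same join-loop pattern around a stoppable worker thread, using only the standard library; the Worker class and its sleep-based run loop are made up for illustration.

import threading
import time

class Worker(threading.Thread):
    def __init__(self):
        super(Worker, self).__init__()
        self._stop_event = threading.Event()

    def run(self):
        # placeholder for the real consume loop
        while not self._stop_event.is_set():
            time.sleep(0.1)

    def shutdown(self):
        self._stop_event.set()

    def start(self, join=False):
        threading.Thread.start(self)
        if join:
            try:
                # keep the main thread alive so Ctrl+C is captured here
                while self.is_alive():
                    self.join(timeout=60)
            except KeyboardInterrupt:
                self.shutdown()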
def array_indented(level, l, quote_char='\'', comma_after=False):
# type: (int, List[str], str, bool) -> str
"""
return an array indented according to indent level
:param level:
:param l:
:param quote_char:
:param comma_after:
:return:
"""
out = "[\n"
for x in l:
out += (((level+1) * 4) * " ") + '{}{}{}'.format(quote_char, x, quote_char) + ",\n"
out += ((level * 4) * " ") + "]"
if comma_after:
out += ","
return out
|
def function[array_indented, parameter[level, l, quote_char, comma_after]]:
constant[
return an array indented according to indent level
:param level:
:param l:
:param quote_char:
:param comma_after:
:return:
]
variable[out] assign[=] constant[[
]
for taget[name[x]] in starred[name[l]] begin[:]
<ast.AugAssign object at 0x7da18ede7a60>
<ast.AugAssign object at 0x7da18f00dc90>
if name[comma_after] begin[:]
<ast.AugAssign object at 0x7da18f00c970>
return[name[out]]
|
keyword[def] identifier[array_indented] ( identifier[level] , identifier[l] , identifier[quote_char] = literal[string] , identifier[comma_after] = keyword[False] ):
literal[string]
identifier[out] = literal[string]
keyword[for] identifier[x] keyword[in] identifier[l] :
identifier[out] +=((( identifier[level] + literal[int] )* literal[int] )* literal[string] )+ literal[string] . identifier[format] ( identifier[quote_char] , identifier[x] , identifier[quote_char] )+ literal[string]
identifier[out] +=(( identifier[level] * literal[int] )* literal[string] )+ literal[string]
keyword[if] identifier[comma_after] :
identifier[out] += literal[string]
keyword[return] identifier[out]
|
def array_indented(level, l, quote_char="'", comma_after=False):
# type: (int, List[str], str, bool) -> str
'\n return an array indented according to indent level\n :param level:\n :param l:\n :param quote_char:\n :param comma_after:\n :return:\n '
out = '[\n'
for x in l:
out += (level + 1) * 4 * ' ' + '{}{}{}'.format(quote_char, x, quote_char) + ',\n' # depends on [control=['for'], data=['x']]
out += level * 4 * ' ' + ']'
if comma_after:
out += ',' # depends on [control=['if'], data=[]]
return out
|
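With the definition above in scope, a quick call shows the produced layout (elements are indented one level deeper than the closing bracket):

print(array_indented(1, ['alpha', 'beta'], comma_after=True))
# [
#         'alpha',
#         'beta',
#     ],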
def _previous(self):
"""Get the previous summary and present it."""
self.summaries.rotate()
current_summary = self.summaries[0]
self._update_summary(current_summary)
|
def function[_previous, parameter[self]]:
constant[Get the previous summary and present it.]
call[name[self].summaries.rotate, parameter[]]
variable[current_summary] assign[=] call[name[self].summaries][constant[0]]
call[name[self]._update_summary, parameter[name[current_summary]]]
|
keyword[def] identifier[_previous] ( identifier[self] ):
literal[string]
identifier[self] . identifier[summaries] . identifier[rotate] ()
identifier[current_summary] = identifier[self] . identifier[summaries] [ literal[int] ]
identifier[self] . identifier[_update_summary] ( identifier[current_summary] )
|
def _previous(self):
"""Get the previous summary and present it."""
self.summaries.rotate()
current_summary = self.summaries[0]
self._update_summary(current_summary)
|
def clean(self, value):
"""Cleans and returns the given value, or raises a ParameterNotValidError exception"""
if isinstance(value, six.string_types) and value.lower() == 'false':
return False
return bool(value)
|
def function[clean, parameter[self, value]]:
constant[Cleans and returns the given value, or raises a ParameterNotValidError exception]
if <ast.BoolOp object at 0x7da2054a60b0> begin[:]
return[constant[False]]
return[call[name[bool], parameter[name[value]]]]
|
keyword[def] identifier[clean] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[string_types] ) keyword[and] identifier[value] . identifier[lower] ()== literal[string] :
keyword[return] keyword[False]
keyword[return] identifier[bool] ( identifier[value] )
|
def clean(self, value):
"""Cleans and returns the given value, or raises a ParameterNotValidError exception"""
if isinstance(value, six.string_types) and value.lower() == 'false':
return False # depends on [control=['if'], data=[]]
return bool(value)
|
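Standalone sketch of the same coercion rule, worth noting because only the literal string 'false' (any case) maps to False, while every other non-empty string is truthy:

import six

def clean_bool(value):
    if isinstance(value, six.string_types) and value.lower() == 'false':
        return False
    return bool(value)

print(clean_bool('False'))  # False
print(clean_bool('true'))   # True
print(clean_bool(''))       # False (empty string)
print(clean_bool('0'))      # True  (non-empty string other than 'false')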
def read_file(file_path_name):
"""
Read the content of the specified file.
@param file_path_name: path and name of the file to read.
@return: content of the specified file.
"""
with io.open(os.path.join(os.path.dirname(__file__), file_path_name), mode='rt', encoding='utf-8') as fd:
return fd.read()
|
def function[read_file, parameter[file_path_name]]:
constant[
Read the content of the specified file.
@param file_path_name: path and name of the file to read.
@return: content of the specified file.
]
with call[name[io].open, parameter[call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], name[file_path_name]]]]] begin[:]
return[call[name[fd].read, parameter[]]]
|
keyword[def] identifier[read_file] ( identifier[file_path_name] ):
literal[string]
keyword[with] identifier[io] . identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), identifier[file_path_name] ), identifier[mode] = literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[fd] :
keyword[return] identifier[fd] . identifier[read] ()
|
def read_file(file_path_name):
"""
Read the content of the specified file.
@param file_path_name: path and name of the file to read.
@return: content of the specified file.
"""
with io.open(os.path.join(os.path.dirname(__file__), file_path_name), mode='rt', encoding='utf-8') as fd:
return fd.read() # depends on [control=['with'], data=['fd']]
|
def get_nic(self, datacenter_id, server_id, nic_id, depth=1):
"""
Retrieves a NIC by its ID.
:param datacenter_id: The unique ID of the data center.
:type datacenter_id: ``str``
:param server_id: The unique ID of the server.
:type server_id: ``str``
:param nic_id: The unique ID of the NIC.
:type nic_id: ``str``
:param depth: The depth of the response data.
:type depth: ``int``
"""
response = self._perform_request(
'/datacenters/%s/servers/%s/nics/%s?depth=%s' % (
datacenter_id,
server_id,
nic_id,
str(depth)))
return response
|
def function[get_nic, parameter[self, datacenter_id, server_id, nic_id, depth]]:
constant[
Retrieves a NIC by its ID.
:param datacenter_id: The unique ID of the data center.
:type datacenter_id: ``str``
:param server_id: The unique ID of the server.
:type server_id: ``str``
:param nic_id: The unique ID of the NIC.
:type nic_id: ``str``
:param depth: The depth of the response data.
:type depth: ``int``
]
variable[response] assign[=] call[name[self]._perform_request, parameter[binary_operation[constant[/datacenters/%s/servers/%s/nics/%s?depth=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b26af760>, <ast.Name object at 0x7da1b26ad150>, <ast.Name object at 0x7da1b26aeaa0>, <ast.Call object at 0x7da1b26ad960>]]]]]
return[name[response]]
|
keyword[def] identifier[get_nic] ( identifier[self] , identifier[datacenter_id] , identifier[server_id] , identifier[nic_id] , identifier[depth] = literal[int] ):
literal[string]
identifier[response] = identifier[self] . identifier[_perform_request] (
literal[string] %(
identifier[datacenter_id] ,
identifier[server_id] ,
identifier[nic_id] ,
identifier[str] ( identifier[depth] )))
keyword[return] identifier[response]
|
def get_nic(self, datacenter_id, server_id, nic_id, depth=1):
"""
Retrieves a NIC by its ID.
:param datacenter_id: The unique ID of the data center.
:type datacenter_id: ``str``
:param server_id: The unique ID of the server.
:type server_id: ``str``
:param nic_id: The unique ID of the NIC.
:type nic_id: ``str``
:param depth: The depth of the response data.
:type depth: ``int``
"""
response = self._perform_request('/datacenters/%s/servers/%s/nics/%s?depth=%s' % (datacenter_id, server_id, nic_id, str(depth)))
return response
|
def SetPassword(self,password):
"""Request change of password.
The API request requires supplying the current password. For this we issue a call
to retrieve the credentials so note there will be an activity log for retrieving the
credentials associated with any SetPassword entry
>>> s.SetPassword("newpassword")
"""
# 0: {op: "set", member: "password", value: {current: " r`5Mun/vT:qZ]2?z", password: "Savvis123!"}}
if self.data['status'] != "active": raise(clc.CLCException("Server must be powered on to change password"))
return(clc.v2.Requests(clc.v2.API.Call('PATCH','servers/%s/%s' % (self.alias,self.id),
json.dumps([{"op": "set", "member": "password", "value": {"current": self.Credentials()['password'], "password": password}}]),
session=self.session),
alias=self.alias,
session=self.session))
|
def function[SetPassword, parameter[self, password]]:
constant[Request change of password.
The API request requires supplying the current password. For this we issue a call
to retrieve the credentials so note there will be an activity log for retrieving the
credentials associated with any SetPassword entry
>>> s.SetPassword("newpassword")
]
if compare[call[name[self].data][constant[status]] not_equal[!=] constant[active]] begin[:]
<ast.Raise object at 0x7da1b2260970>
return[call[name[clc].v2.Requests, parameter[call[name[clc].v2.API.Call, parameter[constant[PATCH], binary_operation[constant[servers/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b2260460>, <ast.Attribute object at 0x7da1b2260bb0>]]], call[name[json].dumps, parameter[list[[<ast.Dict object at 0x7da1b22613c0>]]]]]]]]]
|
keyword[def] identifier[SetPassword] ( identifier[self] , identifier[password] ):
literal[string]
keyword[if] identifier[self] . identifier[data] [ literal[string] ]!= literal[string] : keyword[raise] ( identifier[clc] . identifier[CLCException] ( literal[string] ))
keyword[return] ( identifier[clc] . identifier[v2] . identifier[Requests] ( identifier[clc] . identifier[v2] . identifier[API] . identifier[Call] ( literal[string] , literal[string] %( identifier[self] . identifier[alias] , identifier[self] . identifier[id] ),
identifier[json] . identifier[dumps] ([{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] :{ literal[string] : identifier[self] . identifier[Credentials] ()[ literal[string] ], literal[string] : identifier[password] }}]),
identifier[session] = identifier[self] . identifier[session] ),
identifier[alias] = identifier[self] . identifier[alias] ,
identifier[session] = identifier[self] . identifier[session] ))
|
def SetPassword(self, password):
"""Request change of password.
The API request requires supplying the current password. For this we issue a call
to retrieve the credentials so note there will be an activity log for retrieving the
credentials associated with any SetPassword entry
>>> s.SetPassword("newpassword")
""" # 0: {op: "set", member: "password", value: {current: " r`5Mun/vT:qZ]2?z", password: "Savvis123!"}}
if self.data['status'] != 'active':
raise clc.CLCException('Server must be powered on to change password') # depends on [control=['if'], data=[]]
return clc.v2.Requests(clc.v2.API.Call('PATCH', 'servers/%s/%s' % (self.alias, self.id), json.dumps([{'op': 'set', 'member': 'password', 'value': {'current': self.Credentials()['password'], 'password': password}}]), session=self.session), alias=self.alias, session=self.session)
|
def set_start_date(self, date):
"""Sets the start date.
arg: date (osid.calendaring.DateTime): the new date
raise: InvalidArgument - ``date`` is invalid
raise: NoAccess - ``Metadata.isReadOnly()`` is ``true``
raise: NullArgument - ``date`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
if self.get_start_date_metadata().is_read_only():
raise errors.NoAccess()
if not self._is_valid_date_time(date, self.get_start_date_metadata()):
raise errors.InvalidArgument()
# self._my_map['startDate'] = self._get_date_map(date)
self._my_map['startDate'] = date
|
def function[set_start_date, parameter[self, date]]:
constant[Sets the start date.
arg: date (osid.calendaring.DateTime): the new date
raise: InvalidArgument - ``date`` is invalid
raise: NoAccess - ``Metadata.isReadOnly()`` is ``true``
raise: NullArgument - ``date`` is ``null``
*compliance: mandatory -- This method must be implemented.*
]
if call[call[name[self].get_start_date_metadata, parameter[]].is_read_only, parameter[]] begin[:]
<ast.Raise object at 0x7da204962830>
if <ast.UnaryOp object at 0x7da204961750> begin[:]
<ast.Raise object at 0x7da1b0917e80>
call[name[self]._my_map][constant[startDate]] assign[=] name[date]
|
keyword[def] identifier[set_start_date] ( identifier[self] , identifier[date] ):
literal[string]
keyword[if] identifier[self] . identifier[get_start_date_metadata] (). identifier[is_read_only] ():
keyword[raise] identifier[errors] . identifier[NoAccess] ()
keyword[if] keyword[not] identifier[self] . identifier[_is_valid_date_time] ( identifier[date] , identifier[self] . identifier[get_start_date_metadata] ()):
keyword[raise] identifier[errors] . identifier[InvalidArgument] ()
identifier[self] . identifier[_my_map] [ literal[string] ]= identifier[date]
|
def set_start_date(self, date):
"""Sets the start date.
arg: date (osid.calendaring.DateTime): the new date
raise: InvalidArgument - ``date`` is invalid
raise: NoAccess - ``Metadata.isReadOnly()`` is ``true``
raise: NullArgument - ``date`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
if self.get_start_date_metadata().is_read_only():
raise errors.NoAccess() # depends on [control=['if'], data=[]]
if not self._is_valid_date_time(date, self.get_start_date_metadata()):
raise errors.InvalidArgument() # depends on [control=['if'], data=[]]
# self._my_map['startDate'] = self._get_date_map(date)
self._my_map['startDate'] = date
|
def get_instance(self, payload):
"""
Build an instance of RoleInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.chat.v2.service.role.RoleInstance
:rtype: twilio.rest.chat.v2.service.role.RoleInstance
"""
return RoleInstance(self._version, payload, service_sid=self._solution['service_sid'], )
|
def function[get_instance, parameter[self, payload]]:
constant[
Build an instance of RoleInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.chat.v2.service.role.RoleInstance
:rtype: twilio.rest.chat.v2.service.role.RoleInstance
]
return[call[name[RoleInstance], parameter[name[self]._version, name[payload]]]]
|
keyword[def] identifier[get_instance] ( identifier[self] , identifier[payload] ):
literal[string]
keyword[return] identifier[RoleInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[service_sid] = identifier[self] . identifier[_solution] [ literal[string] ],)
|
def get_instance(self, payload):
"""
Build an instance of RoleInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.chat.v2.service.role.RoleInstance
:rtype: twilio.rest.chat.v2.service.role.RoleInstance
"""
return RoleInstance(self._version, payload, service_sid=self._solution['service_sid'])
|
def _union_lcs(evaluated_sentences, reference_sentence, prev_union=None):
"""
Returns LCS_u(r_i, C) which is the LCS score of the union longest common
subsequence between reference sentence ri and candidate summary C.
For example:
if r_i= w1 w2 w3 w4 w5, and C contains two sentences: c1 = w1 w2 w6 w7 w8
and c2 = w1 w3 w8 w9 w5, then the longest common subsequence of r_i and c1
is "w1 w2" and the longest common subsequence of r_i and c2 is "w1 w3 w5".
The union longest common subsequence of r_i, c1, and c2 is "w1 w2 w3 w5"
and LCS_u(r_i, C) = 4/5.
Args:
evaluated_sentences: The sentences that have been picked by the
summarizer
reference_sentence: One of the sentences in the reference summaries
Returns:
float: LCS_u(r_i, C)
ValueError:
Raises exception if a param has len <= 0
"""
if prev_union is None:
prev_union = set()
if len(evaluated_sentences) <= 0:
raise ValueError("Collections must contain at least 1 sentence.")
lcs_union = prev_union
prev_count = len(prev_union)
reference_words = _split_into_words([reference_sentence])
combined_lcs_length = 0
for eval_s in evaluated_sentences:
evaluated_words = _split_into_words([eval_s])
lcs = set(_recon_lcs(reference_words, evaluated_words))
combined_lcs_length += len(lcs)
lcs_union = lcs_union.union(lcs)
new_lcs_count = len(lcs_union) - prev_count
return new_lcs_count, lcs_union
|
def function[_union_lcs, parameter[evaluated_sentences, reference_sentence, prev_union]]:
constant[
Returns LCS_u(r_i, C) which is the LCS score of the union longest common
subsequence between reference sentence ri and candidate summary C.
For example:
if r_i= w1 w2 w3 w4 w5, and C contains two sentences: c1 = w1 w2 w6 w7 w8
and c2 = w1 w3 w8 w9 w5, then the longest common subsequence of r_i and c1
is "w1 w2" and the longest common subsequence of r_i and c2 is "w1 w3 w5".
The union longest common subsequence of r_i, c1, and c2 is "w1 w2 w3 w5"
and LCS_u(r_i, C) = 4/5.
Args:
evaluated_sentences: The sentences that have been picked by the
summarizer
reference_sentence: One of the sentences in the reference summaries
Returns:
float: LCS_u(r_i, C)
ValueError:
Raises exception if a param has len <= 0
]
if compare[name[prev_union] is constant[None]] begin[:]
variable[prev_union] assign[=] call[name[set], parameter[]]
if compare[call[name[len], parameter[name[evaluated_sentences]]] less_or_equal[<=] constant[0]] begin[:]
<ast.Raise object at 0x7da1b024fca0>
variable[lcs_union] assign[=] name[prev_union]
variable[prev_count] assign[=] call[name[len], parameter[name[prev_union]]]
variable[reference_words] assign[=] call[name[_split_into_words], parameter[list[[<ast.Name object at 0x7da1b024f100>]]]]
variable[combined_lcs_length] assign[=] constant[0]
for taget[name[eval_s]] in starred[name[evaluated_sentences]] begin[:]
variable[evaluated_words] assign[=] call[name[_split_into_words], parameter[list[[<ast.Name object at 0x7da1b02951b0>]]]]
variable[lcs] assign[=] call[name[set], parameter[call[name[_recon_lcs], parameter[name[reference_words], name[evaluated_words]]]]]
<ast.AugAssign object at 0x7da1b02960b0>
variable[lcs_union] assign[=] call[name[lcs_union].union, parameter[name[lcs]]]
variable[new_lcs_count] assign[=] binary_operation[call[name[len], parameter[name[lcs_union]]] - name[prev_count]]
return[tuple[[<ast.Name object at 0x7da1b0294700>, <ast.Name object at 0x7da1b0297940>]]]
|
keyword[def] identifier[_union_lcs] ( identifier[evaluated_sentences] , identifier[reference_sentence] , identifier[prev_union] = keyword[None] ):
literal[string]
keyword[if] identifier[prev_union] keyword[is] keyword[None] :
identifier[prev_union] = identifier[set] ()
keyword[if] identifier[len] ( identifier[evaluated_sentences] )<= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[lcs_union] = identifier[prev_union]
identifier[prev_count] = identifier[len] ( identifier[prev_union] )
identifier[reference_words] = identifier[_split_into_words] ([ identifier[reference_sentence] ])
identifier[combined_lcs_length] = literal[int]
keyword[for] identifier[eval_s] keyword[in] identifier[evaluated_sentences] :
identifier[evaluated_words] = identifier[_split_into_words] ([ identifier[eval_s] ])
identifier[lcs] = identifier[set] ( identifier[_recon_lcs] ( identifier[reference_words] , identifier[evaluated_words] ))
identifier[combined_lcs_length] += identifier[len] ( identifier[lcs] )
identifier[lcs_union] = identifier[lcs_union] . identifier[union] ( identifier[lcs] )
identifier[new_lcs_count] = identifier[len] ( identifier[lcs_union] )- identifier[prev_count]
keyword[return] identifier[new_lcs_count] , identifier[lcs_union]
|
def _union_lcs(evaluated_sentences, reference_sentence, prev_union=None):
"""
Returns LCS_u(r_i, C) which is the LCS score of the union longest common
subsequence between reference sentence ri and candidate summary C.
For example:
if r_i= w1 w2 w3 w4 w5, and C contains two sentences: c1 = w1 w2 w6 w7 w8
and c2 = w1 w3 w8 w9 w5, then the longest common subsequence of r_i and c1
is "w1 w2" and the longest common subsequence of r_i and c2 is "w1 w3 w5".
The union longest common subsequence of r_i, c1, and c2 is "w1 w2 w3 w5"
and LCS_u(r_i, C) = 4/5.
Args:
evaluated_sentences: The sentences that have been picked by the
summarizer
reference_sentence: One of the sentences in the reference summaries
Returns:
float: LCS_u(r_i, C)
ValueError:
Raises exception if a param has len <= 0
"""
if prev_union is None:
prev_union = set() # depends on [control=['if'], data=['prev_union']]
if len(evaluated_sentences) <= 0:
raise ValueError('Collections must contain at least 1 sentence.') # depends on [control=['if'], data=[]]
lcs_union = prev_union
prev_count = len(prev_union)
reference_words = _split_into_words([reference_sentence])
combined_lcs_length = 0
for eval_s in evaluated_sentences:
evaluated_words = _split_into_words([eval_s])
lcs = set(_recon_lcs(reference_words, evaluated_words))
combined_lcs_length += len(lcs)
lcs_union = lcs_union.union(lcs) # depends on [control=['for'], data=['eval_s']]
new_lcs_count = len(lcs_union) - prev_count
return (new_lcs_count, lcs_union)
|
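Self-contained illustration of the union-LCS idea from the docstring, using a plain dynamic-programming LCS in place of the module's _recon_lcs helper; it reproduces the r_i/c1/c2 example and the 4/5 score.

def lcs_tokens(a, b):
    # standard DP table, then backtrack to recover one LCS as a token list
    m, n = len(a), len(b)
    dp = [[0] * (n + 1) for _ in range(m + 1)]
    for i in range(m):
        for j in range(n):
            dp[i + 1][j + 1] = dp[i][j] + 1 if a[i] == b[j] else max(dp[i][j + 1], dp[i + 1][j])
    out, i, j = [], m, n
    while i and j:
        if a[i - 1] == b[j - 1]:
            out.append(a[i - 1])
            i -= 1
            j -= 1
        elif dp[i - 1][j] >= dp[i][j - 1]:
            i -= 1
        else:
            j -= 1
    return out[::-1]

reference = 'w1 w2 w3 w4 w5'.split()
c1 = 'w1 w2 w6 w7 w8'.split()
c2 = 'w1 w3 w8 w9 w5'.split()
union = set(lcs_tokens(reference, c1)) | set(lcs_tokens(reference, c2))
print(sorted(union), len(union) / len(reference))  # ['w1', 'w2', 'w3', 'w5'] 0.8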
def pop(cls, anchors):
"""
Args:
anchors (str | unicode | list): Optional paths to use as anchors for short()
"""
for anchor in flattened(anchors, split=SANITIZED | UNIQUE):
if anchor in cls.paths:
cls.paths.remove(anchor)
|
def function[pop, parameter[cls, anchors]]:
constant[
Args:
anchors (str | unicode | list): Optional paths to use as anchors for short()
]
for taget[name[anchor]] in starred[call[name[flattened], parameter[name[anchors]]]] begin[:]
if compare[name[anchor] in name[cls].paths] begin[:]
call[name[cls].paths.remove, parameter[name[anchor]]]
|
keyword[def] identifier[pop] ( identifier[cls] , identifier[anchors] ):
literal[string]
keyword[for] identifier[anchor] keyword[in] identifier[flattened] ( identifier[anchors] , identifier[split] = identifier[SANITIZED] | identifier[UNIQUE] ):
keyword[if] identifier[anchor] keyword[in] identifier[cls] . identifier[paths] :
identifier[cls] . identifier[paths] . identifier[remove] ( identifier[anchor] )
|
def pop(cls, anchors):
"""
Args:
anchors (str | unicode | list): Optional paths to use as anchors for short()
"""
for anchor in flattened(anchors, split=SANITIZED | UNIQUE):
if anchor in cls.paths:
cls.paths.remove(anchor) # depends on [control=['if'], data=['anchor']] # depends on [control=['for'], data=['anchor']]
|
def export(self, private_keys=True):
"""Exports a RFC 7517 keyset using the standard JSON format
        :param private_keys(bool): Whether to export private keys.
Defaults to True.
"""
exp_dict = dict()
for k, v in iteritems(self):
if k == 'keys':
keys = list()
for jwk in v:
keys.append(json_decode(jwk.export(private_keys)))
v = keys
exp_dict[k] = v
return json_encode(exp_dict)
|
def function[export, parameter[self, private_keys]]:
constant[Exports a RFC 7517 keyset using the standard JSON format
    :param private_keys(bool): Whether to export private keys.
Defaults to True.
]
variable[exp_dict] assign[=] call[name[dict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b07b4a60>, <ast.Name object at 0x7da1b07b4be0>]]] in starred[call[name[iteritems], parameter[name[self]]]] begin[:]
if compare[name[k] equal[==] constant[keys]] begin[:]
variable[keys] assign[=] call[name[list], parameter[]]
for taget[name[jwk]] in starred[name[v]] begin[:]
call[name[keys].append, parameter[call[name[json_decode], parameter[call[name[jwk].export, parameter[name[private_keys]]]]]]]
variable[v] assign[=] name[keys]
call[name[exp_dict]][name[k]] assign[=] name[v]
return[call[name[json_encode], parameter[name[exp_dict]]]]
|
keyword[def] identifier[export] ( identifier[self] , identifier[private_keys] = keyword[True] ):
literal[string]
identifier[exp_dict] = identifier[dict] ()
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[self] ):
keyword[if] identifier[k] == literal[string] :
identifier[keys] = identifier[list] ()
keyword[for] identifier[jwk] keyword[in] identifier[v] :
identifier[keys] . identifier[append] ( identifier[json_decode] ( identifier[jwk] . identifier[export] ( identifier[private_keys] )))
identifier[v] = identifier[keys]
identifier[exp_dict] [ identifier[k] ]= identifier[v]
keyword[return] identifier[json_encode] ( identifier[exp_dict] )
|
def export(self, private_keys=True):
"""Exports a RFC 7517 keyset using the standard JSON format
    :param private_keys(bool): Whether to export private keys.
Defaults to True.
"""
exp_dict = dict()
for (k, v) in iteritems(self):
if k == 'keys':
keys = list()
for jwk in v:
keys.append(json_decode(jwk.export(private_keys))) # depends on [control=['for'], data=['jwk']]
v = keys # depends on [control=['if'], data=[]]
exp_dict[k] = v # depends on [control=['for'], data=[]]
return json_encode(exp_dict)
|
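Hedged usage sketch, assuming this is the JWKSet.export method of the jwcrypto package; the generated EC key is purely illustrative.

from jwcrypto import jwk

keyset = jwk.JWKSet()
keyset.add(jwk.JWK.generate(kty='EC', crv='P-256'))

print(keyset.export(private_keys=False))  # public parts only
print(keyset.export())                    # includes private key material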
def migrate(self) -> None:
""" Migrate
Perform a database migration, upgrading to the latest schema level.
"""
assert self.ormSessionCreator, "ormSessionCreator is not defined"
connection = self._dbEngine.connect()
isDbInitialised = self._dbEngine.dialect.has_table(
connection, 'alembic_version',
schema=self._metadata.schema)
connection.close()
if isDbInitialised or not self._enableCreateAll:
self._doMigration(self._dbEngine)
else:
self._doCreateAll(self._dbEngine)
if self._enableForeignKeys:
self.checkForeignKeys(self._dbEngine)
|
def function[migrate, parameter[self]]:
constant[ Migrate
Perform a database migration, upgrading to the latest schema level.
]
assert[name[self].ormSessionCreator]
variable[connection] assign[=] call[name[self]._dbEngine.connect, parameter[]]
variable[isDbInitialised] assign[=] call[name[self]._dbEngine.dialect.has_table, parameter[name[connection], constant[alembic_version]]]
call[name[connection].close, parameter[]]
if <ast.BoolOp object at 0x7da20e955e40> begin[:]
call[name[self]._doMigration, parameter[name[self]._dbEngine]]
if name[self]._enableForeignKeys begin[:]
call[name[self].checkForeignKeys, parameter[name[self]._dbEngine]]
|
keyword[def] identifier[migrate] ( identifier[self] )-> keyword[None] :
literal[string]
keyword[assert] identifier[self] . identifier[ormSessionCreator] , literal[string]
identifier[connection] = identifier[self] . identifier[_dbEngine] . identifier[connect] ()
identifier[isDbInitialised] = identifier[self] . identifier[_dbEngine] . identifier[dialect] . identifier[has_table] (
identifier[connection] , literal[string] ,
identifier[schema] = identifier[self] . identifier[_metadata] . identifier[schema] )
identifier[connection] . identifier[close] ()
keyword[if] identifier[isDbInitialised] keyword[or] keyword[not] identifier[self] . identifier[_enableCreateAll] :
identifier[self] . identifier[_doMigration] ( identifier[self] . identifier[_dbEngine] )
keyword[else] :
identifier[self] . identifier[_doCreateAll] ( identifier[self] . identifier[_dbEngine] )
keyword[if] identifier[self] . identifier[_enableForeignKeys] :
identifier[self] . identifier[checkForeignKeys] ( identifier[self] . identifier[_dbEngine] )
|
def migrate(self) -> None:
""" Migrate
Perform a database migration, upgrading to the latest schema level.
"""
assert self.ormSessionCreator, 'ormSessionCreator is not defined'
connection = self._dbEngine.connect()
isDbInitialised = self._dbEngine.dialect.has_table(connection, 'alembic_version', schema=self._metadata.schema)
connection.close()
if isDbInitialised or not self._enableCreateAll:
self._doMigration(self._dbEngine) # depends on [control=['if'], data=[]]
else:
self._doCreateAll(self._dbEngine)
if self._enableForeignKeys:
self.checkForeignKeys(self._dbEngine) # depends on [control=['if'], data=[]]
|
def plane_intersection(self, plane, forward_only=False):
'''return point where line intersects with a plane'''
l_dot_n = self.vector * plane.normal
if l_dot_n == 0.0:
# line is parallel to the plane
return None
d = ((plane.point - self.point) * plane.normal) / l_dot_n
if forward_only and d < 0:
return None
return (self.vector * d) + self.point
|
def function[plane_intersection, parameter[self, plane, forward_only]]:
constant[return point where line intersects with a plane]
variable[l_dot_n] assign[=] binary_operation[name[self].vector * name[plane].normal]
if compare[name[l_dot_n] equal[==] constant[0.0]] begin[:]
return[constant[None]]
variable[d] assign[=] binary_operation[binary_operation[binary_operation[name[plane].point - name[self].point] * name[plane].normal] / name[l_dot_n]]
if <ast.BoolOp object at 0x7da204347190> begin[:]
return[constant[None]]
return[binary_operation[binary_operation[name[self].vector * name[d]] + name[self].point]]
|
keyword[def] identifier[plane_intersection] ( identifier[self] , identifier[plane] , identifier[forward_only] = keyword[False] ):
literal[string]
identifier[l_dot_n] = identifier[self] . identifier[vector] * identifier[plane] . identifier[normal]
keyword[if] identifier[l_dot_n] == literal[int] :
keyword[return] keyword[None]
identifier[d] =(( identifier[plane] . identifier[point] - identifier[self] . identifier[point] )* identifier[plane] . identifier[normal] )/ identifier[l_dot_n]
keyword[if] identifier[forward_only] keyword[and] identifier[d] < literal[int] :
keyword[return] keyword[None]
keyword[return] ( identifier[self] . identifier[vector] * identifier[d] )+ identifier[self] . identifier[point]
|
def plane_intersection(self, plane, forward_only=False):
"""return point where line intersects with a plane"""
l_dot_n = self.vector * plane.normal
if l_dot_n == 0.0:
# line is parallel to the plane
return None # depends on [control=['if'], data=[]]
d = (plane.point - self.point) * plane.normal / l_dot_n
if forward_only and d < 0:
return None # depends on [control=['if'], data=[]]
return self.vector * d + self.point
|
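For reference, a self-contained sketch of the same parametric line-plane intersection used by plane_intersection above, written with plain tuples instead of the Line/Plane/vector classes it assumes (all names below are illustrative):

# Illustrative sketch only: the same formula, d = ((plane_point - line_point) . n) / (direction . n).
def intersect_line_plane(line_point, direction, plane_point, normal, forward_only=False):
    def dot(a, b):
        return sum(x * y for x, y in zip(a, b))
    l_dot_n = dot(direction, normal)
    if l_dot_n == 0.0:
        return None  # line is parallel to the plane
    d = dot([p - q for p, q in zip(plane_point, line_point)], normal) / l_dot_n
    if forward_only and d < 0:
        return None  # intersection lies behind the ray origin
    return tuple(p + d * v for p, v in zip(line_point, direction))

# A ray from the origin along +z meets the plane z = 5 at (0, 0, 5).
print(intersect_line_plane((0, 0, 0), (0, 0, 1), (0, 0, 5), (0, 0, 1), forward_only=True))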
def raise_error(self, error):
"""Raises the exception in the client.
Called by the client to convert the :py:class:`RPCErrorResponse` into an Exception
and raise or return it depending on the :py:attr:`raises_errors` attribute.
:param error: The error response received from the server.
:type error: :py:class:`RPCResponse`
:rtype: :py:exc:`~tinyrpc.exc.RPCError` when :py:attr:`raises_errors` is False.
:raises: :py:exc:`~tinyrpc.exc.RPCError` when :py:attr:`raises_errors` is True.
"""
ex = exc.RPCError('Error calling remote procedure: %s' % error.error['message'])
if self.raises_errors:
raise ex
return ex
|
def function[raise_error, parameter[self, error]]:
constant[Raises the exception in the client.
Called by the client to convert the :py:class:`RPCErrorResponse` into an Exception
and raise or return it depending on the :py:attr:`raises_errors` attribute.
:param error: The error response received from the server.
:type error: :py:class:`RPCResponse`
:rtype: :py:exc:`~tinyrpc.exc.RPCError` when :py:attr:`raises_errors` is False.
:raises: :py:exc:`~tinyrpc.exc.RPCError` when :py:attr:`raises_errors` is True.
]
variable[ex] assign[=] call[name[exc].RPCError, parameter[binary_operation[constant[Error calling remote procedure: %s] <ast.Mod object at 0x7da2590d6920> call[name[error].error][constant[message]]]]]
if name[self].raises_errors begin[:]
<ast.Raise object at 0x7da18f722da0>
return[name[ex]]
|
keyword[def] identifier[raise_error] ( identifier[self] , identifier[error] ):
literal[string]
identifier[ex] = identifier[exc] . identifier[RPCError] ( literal[string] % identifier[error] . identifier[error] [ literal[string] ])
keyword[if] identifier[self] . identifier[raises_errors] :
keyword[raise] identifier[ex]
keyword[return] identifier[ex]
|
def raise_error(self, error):
"""Raises the exception in the client.
Called by the client to convert the :py:class:`RPCErrorResponse` into an Exception
and raise or return it depending on the :py:attr:`raises_errors` attribute.
:param error: The error response received from the server.
:type error: :py:class:`RPCResponse`
:rtype: :py:exc:`~tinyrpc.exc.RPCError` when :py:attr:`raises_errors` is False.
:raises: :py:exc:`~tinyrpc.exc.RPCError` when :py:attr:`raises_errors` is True.
"""
ex = exc.RPCError('Error calling remote procedure: %s' % error.error['message'])
if self.raises_errors:
raise ex # depends on [control=['if'], data=[]]
return ex
|
def compare(self, statement_a, statement_b):
"""
Compare the two input statements.
:return: The percent of similarity between the text of the statements.
:rtype: float
"""
# Return 0 if either statement has a falsy text value
if not statement_a.text or not statement_b.text:
return 0
# Get the lowercase version of both strings
statement_a_text = str(statement_a.text.lower())
statement_b_text = str(statement_b.text.lower())
similarity = SequenceMatcher(
None,
statement_a_text,
statement_b_text
)
# Calculate a decimal percent of the similarity
percent = round(similarity.ratio(), 2)
return percent
|
def function[compare, parameter[self, statement_a, statement_b]]:
constant[
Compare the two input statements.
:return: The percent of similarity between the text of the statements.
:rtype: float
]
if <ast.BoolOp object at 0x7da1b1f76500> begin[:]
return[constant[0]]
variable[statement_a_text] assign[=] call[name[str], parameter[call[name[statement_a].text.lower, parameter[]]]]
variable[statement_b_text] assign[=] call[name[str], parameter[call[name[statement_b].text.lower, parameter[]]]]
variable[similarity] assign[=] call[name[SequenceMatcher], parameter[constant[None], name[statement_a_text], name[statement_b_text]]]
variable[percent] assign[=] call[name[round], parameter[call[name[similarity].ratio, parameter[]], constant[2]]]
return[name[percent]]
|
keyword[def] identifier[compare] ( identifier[self] , identifier[statement_a] , identifier[statement_b] ):
literal[string]
keyword[if] keyword[not] identifier[statement_a] . identifier[text] keyword[or] keyword[not] identifier[statement_b] . identifier[text] :
keyword[return] literal[int]
identifier[statement_a_text] = identifier[str] ( identifier[statement_a] . identifier[text] . identifier[lower] ())
identifier[statement_b_text] = identifier[str] ( identifier[statement_b] . identifier[text] . identifier[lower] ())
identifier[similarity] = identifier[SequenceMatcher] (
keyword[None] ,
identifier[statement_a_text] ,
identifier[statement_b_text]
)
identifier[percent] = identifier[round] ( identifier[similarity] . identifier[ratio] (), literal[int] )
keyword[return] identifier[percent]
|
def compare(self, statement_a, statement_b):
"""
Compare the two input statements.
:return: The percent of similarity between the text of the statements.
:rtype: float
"""
# Return 0 if either statement has a falsy text value
if not statement_a.text or not statement_b.text:
return 0 # depends on [control=['if'], data=[]]
# Get the lowercase version of both strings
statement_a_text = str(statement_a.text.lower())
statement_b_text = str(statement_b.text.lower())
similarity = SequenceMatcher(None, statement_a_text, statement_b_text)
# Calculate a decimal percent of the similarity
percent = round(similarity.ratio(), 2)
return percent
|
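The comparator above boils down to difflib's SequenceMatcher ratio on lowercased text; a minimal sketch with plain strings (the statement objects are assumed to expose a .text attribute, not shown here):

# Minimal sketch of the same similarity measure applied to raw strings.
from difflib import SequenceMatcher

def text_similarity(text_a, text_b):
    if not text_a or not text_b:
        return 0
    matcher = SequenceMatcher(None, text_a.lower(), text_b.lower())
    return round(matcher.ratio(), 2)

print(text_similarity("Hello, how are you?", "hello, how are you doing?"))  # 0.86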
def to_json(self):
"""
Returns:
str:
"""
data = dict()
for key, value in self.__dict__.items():
if value:
if hasattr(value, 'to_dict'):
data[key] = value.to_dict()
elif isinstance(value, datetime):
data[key] = value.strftime('%Y-%m-%d %H:%M:%S')
else:
data[key] = value
return json.dumps(data)
|
def function[to_json, parameter[self]]:
constant[
Returns:
str:
]
variable[data] assign[=] call[name[dict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18bcc85e0>, <ast.Name object at 0x7da18bcc8580>]]] in starred[call[name[self].__dict__.items, parameter[]]] begin[:]
if name[value] begin[:]
if call[name[hasattr], parameter[name[value], constant[to_dict]]] begin[:]
call[name[data]][name[key]] assign[=] call[name[value].to_dict, parameter[]]
return[call[name[json].dumps, parameter[name[data]]]]
|
keyword[def] identifier[to_json] ( identifier[self] ):
literal[string]
identifier[data] = identifier[dict] ()
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[self] . identifier[__dict__] . identifier[items] ():
keyword[if] identifier[value] :
keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ):
identifier[data] [ identifier[key] ]= identifier[value] . identifier[to_dict] ()
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[datetime] ):
identifier[data] [ identifier[key] ]= identifier[value] . identifier[strftime] ( literal[string] )
keyword[else] :
identifier[data] [ identifier[key] ]= identifier[value]
keyword[return] identifier[json] . identifier[dumps] ( identifier[data] )
|
def to_json(self):
"""
Returns:
str:
"""
data = dict()
for (key, value) in self.__dict__.items():
if value:
if hasattr(value, 'to_dict'):
data[key] = value.to_dict() # depends on [control=['if'], data=[]]
elif isinstance(value, datetime):
data[key] = value.strftime('%Y-%m-%d %H:%M:%S') # depends on [control=['if'], data=[]]
else:
data[key] = value # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return json.dumps(data)
|
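A tiny illustration of the two special cases to_json handles above (values chosen for illustration only): datetimes are rendered with strftime, and falsy attributes are dropped before json.dumps is called:

# Hypothetical values showing the datetime formatting and the falsy-value filter.
from datetime import datetime

created = datetime(2020, 1, 2, 9, 30, 0)
print(created.strftime('%Y-%m-%d %H:%M:%S'))   # "2020-01-02 09:30:00"
print(bool(''), bool(None), bool(0))           # all False, so such attributes are skipped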
def route53_public_hosted_zone_id(self, lookup, default=None):
"""
Args:
lookup: The zone name to look up. Must end with "."
default: the optional value to return if lookup failed; returns None if not set
Returns:
the ID of the public hosted zone for the 'lookup' domain, or default/None if no match found
"""
list_limit = "100"
# enforce terminal '.' in name, otherwise we could get a partial match of the incorrect zones
if lookup[-1] != '.':
return default
hosted_zones = EFAwsResolver.__CLIENTS["route53"].list_hosted_zones_by_name(DNSName=lookup, MaxItems=list_limit)
# Return if the account has no HostedZones
if not hosted_zones.has_key("HostedZones"):
return default
while True:
for hosted_zone in hosted_zones["HostedZones"]:
if lookup == hosted_zone["Name"] and not hosted_zone["Config"]["PrivateZone"]:
return hosted_zone["Id"].split("/")[2]
if hosted_zones["IsTruncated"]:
hosted_zones = EFAwsResolver.__CLIENTS["route53"].list_hosted_zones_by_name(
DNSName=hosted_zones["NextDNSName"], HostedZoneId=hosted_zones["NextHostedZoneId"], MaxItems=list_limit)
else:
return default
|
def function[route53_public_hosted_zone_id, parameter[self, lookup, default]]:
constant[
Args:
lookup: The zone name to look up. Must end with "."
default: the optional value to return if lookup failed; returns None if not set
Returns:
the ID of the public hosted zone for the 'lookup' domain, or default/None if no match found
]
variable[list_limit] assign[=] constant[100]
if compare[call[name[lookup]][<ast.UnaryOp object at 0x7da1b1be78b0>] not_equal[!=] constant[.]] begin[:]
return[name[default]]
variable[hosted_zones] assign[=] call[call[name[EFAwsResolver].__CLIENTS][constant[route53]].list_hosted_zones_by_name, parameter[]]
if <ast.UnaryOp object at 0x7da1b1be7fa0> begin[:]
return[name[default]]
while constant[True] begin[:]
for taget[name[hosted_zone]] in starred[call[name[hosted_zones]][constant[HostedZones]]] begin[:]
if <ast.BoolOp object at 0x7da1b1be7bb0> begin[:]
return[call[call[call[name[hosted_zone]][constant[Id]].split, parameter[constant[/]]]][constant[2]]]
if call[name[hosted_zones]][constant[IsTruncated]] begin[:]
variable[hosted_zones] assign[=] call[call[name[EFAwsResolver].__CLIENTS][constant[route53]].list_hosted_zones_by_name, parameter[]]
|
keyword[def] identifier[route53_public_hosted_zone_id] ( identifier[self] , identifier[lookup] , identifier[default] = keyword[None] ):
literal[string]
identifier[list_limit] = literal[string]
keyword[if] identifier[lookup] [- literal[int] ]!= literal[string] :
keyword[return] identifier[default]
identifier[hosted_zones] = identifier[EFAwsResolver] . identifier[__CLIENTS] [ literal[string] ]. identifier[list_hosted_zones_by_name] ( identifier[DNSName] = identifier[lookup] , identifier[MaxItems] = identifier[list_limit] )
keyword[if] keyword[not] identifier[hosted_zones] . identifier[has_key] ( literal[string] ):
keyword[return] identifier[default]
keyword[while] keyword[True] :
keyword[for] identifier[hosted_zone] keyword[in] identifier[hosted_zones] [ literal[string] ]:
keyword[if] identifier[lookup] == identifier[hosted_zone] [ literal[string] ] keyword[and] keyword[not] identifier[hosted_zone] [ literal[string] ][ literal[string] ]:
keyword[return] identifier[hosted_zone] [ literal[string] ]. identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] identifier[hosted_zones] [ literal[string] ]:
identifier[hosted_zones] = identifier[EFAwsResolver] . identifier[__CLIENTS] [ literal[string] ]. identifier[list_hosted_zones_by_name] (
identifier[DNSName] = identifier[hosted_zones] [ literal[string] ], identifier[HostedZoneId] = identifier[hosted_zones] [ literal[string] ], identifier[MaxItems] = identifier[list_limit] )
keyword[else] :
keyword[return] identifier[default]
|
def route53_public_hosted_zone_id(self, lookup, default=None):
"""
Args:
lookup: The zone name to look up. Must end with "."
default: the optional value to return if lookup failed; returns None if not set
Returns:
the ID of the public hosted zone for the 'lookup' domain, or default/None if no match found
"""
list_limit = '100'
# enforce terminal '.' in name, otherwise we could get a partial match of the incorrect zones
if lookup[-1] != '.':
return default # depends on [control=['if'], data=[]]
hosted_zones = EFAwsResolver.__CLIENTS['route53'].list_hosted_zones_by_name(DNSName=lookup, MaxItems=list_limit)
# Return if the account has no HostedZones
if not hosted_zones.has_key('HostedZones'):
return default # depends on [control=['if'], data=[]]
while True:
for hosted_zone in hosted_zones['HostedZones']:
if lookup == hosted_zone['Name'] and (not hosted_zone['Config']['PrivateZone']):
return hosted_zone['Id'].split('/')[2] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['hosted_zone']]
if hosted_zones['IsTruncated']:
hosted_zones = EFAwsResolver.__CLIENTS['route53'].list_hosted_zones_by_name(DNSName=hosted_zones['NextDNSName'], HostedZoneId=hosted_zones['NextHostedZoneId'], MaxItems=list_limit) # depends on [control=['if'], data=[]]
else:
return default # depends on [control=['while'], data=[]]
|
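A rough standalone sketch of the same lookup with boto3 directly (requires AWS credentials; note the snippet above uses the Python 2 dict.has_key idiom, for which `'HostedZones' in hosted_zones` is the modern spelling):

# Sketch only: page through list_hosted_zones_by_name until the public zone is found.
import boto3

def public_zone_id(domain, default=None):
    if not domain.endswith("."):
        return default
    client = boto3.client("route53")
    page = client.list_hosted_zones_by_name(DNSName=domain, MaxItems="100")
    while True:
        for zone in page.get("HostedZones", []):
            if zone["Name"] == domain and not zone["Config"]["PrivateZone"]:
                return zone["Id"].split("/")[2]  # "/hostedzone/Z123..." -> "Z123..."
        if not page["IsTruncated"]:
            return default
        page = client.list_hosted_zones_by_name(
            DNSName=page["NextDNSName"],
            HostedZoneId=page["NextHostedZoneId"],
            MaxItems="100")

# public_zone_id("example.com.")  # e.g. "Z1ABC..." if such a public zone exists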
def write_tar(
src_fs, # type: FS
file, # type: Union[Text, BinaryIO]
compression=None, # type: Optional[Text]
encoding="utf-8", # type: Text
walker=None, # type: Optional[Walker]
):
# type: (...) -> None
"""Write the contents of a filesystem to a tar file.
Arguments:
file (str or io.IOBase): Destination file, may be a file
name or an open file object.
compression (str, optional): Compression to use, or `None`
for a plain Tar archive without compression.
encoding(str): The encoding to use for filenames. The
default is ``"utf-8"``.
walker (~fs.walk.Walker, optional): A `Walker` instance, or
`None` to use default walker. You can use this to specify
which files you want to compress.
"""
type_map = {
ResourceType.block_special_file: tarfile.BLKTYPE,
ResourceType.character: tarfile.CHRTYPE,
ResourceType.directory: tarfile.DIRTYPE,
ResourceType.fifo: tarfile.FIFOTYPE,
ResourceType.file: tarfile.REGTYPE,
ResourceType.socket: tarfile.AREGTYPE, # no type for socket
ResourceType.symlink: tarfile.SYMTYPE,
ResourceType.unknown: tarfile.AREGTYPE, # no type for unknown
}
tar_attr = [("uid", "uid"), ("gid", "gid"), ("uname", "user"), ("gname", "group")]
mode = "w:{}".format(compression or "")
if isinstance(file, (six.text_type, six.binary_type)):
_tar = tarfile.open(file, mode=mode)
else:
_tar = tarfile.open(fileobj=file, mode=mode)
current_time = time.time()
walker = walker or Walker()
with _tar:
gen_walk = walker.info(src_fs, namespaces=["details", "stat", "access"])
for path, info in gen_walk:
# Tar names must be relative
tar_name = relpath(path)
if not six.PY3:
# Python2 expects bytes filenames
tar_name = tar_name.encode(encoding, "replace")
tar_info = tarfile.TarInfo(tar_name)
if info.has_namespace("stat"):
mtime = info.get("stat", "st_mtime", current_time)
else:
mtime = info.modified or current_time
if isinstance(mtime, datetime):
mtime = datetime_to_epoch(mtime)
if isinstance(mtime, float):
mtime = int(mtime)
tar_info.mtime = mtime
for tarattr, infoattr in tar_attr:
if getattr(info, infoattr, None) is not None:
setattr(tar_info, tarattr, getattr(info, infoattr, None))
if info.has_namespace("access"):
tar_info.mode = getattr(info.permissions, "mode", 0o420)
if info.is_dir:
tar_info.type = tarfile.DIRTYPE
_tar.addfile(tar_info)
else:
tar_info.type = type_map.get(info.type, tarfile.REGTYPE)
tar_info.size = info.size
with src_fs.openbin(path) as bin_file:
_tar.addfile(tar_info, bin_file)
|
def function[write_tar, parameter[src_fs, file, compression, encoding, walker]]:
constant[Write the contents of a filesystem to a tar file.
Arguments:
file (str or io.IOBase): Destination file, may be a file
name or an open file object.
compression (str, optional): Compression to use, or `None`
for a plain Tar archive without compression.
encoding(str): The encoding to use for filenames. The
default is ``"utf-8"``.
walker (~fs.walk.Walker, optional): A `Walker` instance, or
`None` to use default walker. You can use this to specify
which files you want to compress.
]
variable[type_map] assign[=] dictionary[[<ast.Attribute object at 0x7da1b1663160>, <ast.Attribute object at 0x7da1b1661240>, <ast.Attribute object at 0x7da1b1663370>, <ast.Attribute object at 0x7da1b1663640>, <ast.Attribute object at 0x7da1b1660bb0>, <ast.Attribute object at 0x7da1b1663190>, <ast.Attribute object at 0x7da1b1660280>, <ast.Attribute object at 0x7da1b1661600>], [<ast.Attribute object at 0x7da1b16614e0>, <ast.Attribute object at 0x7da1b1663b20>, <ast.Attribute object at 0x7da1b1661870>, <ast.Attribute object at 0x7da1b1660eb0>, <ast.Attribute object at 0x7da1b1662140>, <ast.Attribute object at 0x7da1b1662530>, <ast.Attribute object at 0x7da1b1661120>, <ast.Attribute object at 0x7da1b1662da0>]]
variable[tar_attr] assign[=] list[[<ast.Tuple object at 0x7da1b16634f0>, <ast.Tuple object at 0x7da1b16619f0>, <ast.Tuple object at 0x7da1b1662d40>, <ast.Tuple object at 0x7da1b1661e10>]]
variable[mode] assign[=] call[constant[w:{}].format, parameter[<ast.BoolOp object at 0x7da1b1663910>]]
if call[name[isinstance], parameter[name[file], tuple[[<ast.Attribute object at 0x7da1b16bc370>, <ast.Attribute object at 0x7da1b16be560>]]]] begin[:]
variable[_tar] assign[=] call[name[tarfile].open, parameter[name[file]]]
variable[current_time] assign[=] call[name[time].time, parameter[]]
variable[walker] assign[=] <ast.BoolOp object at 0x7da1b16bfc70>
with name[_tar] begin[:]
variable[gen_walk] assign[=] call[name[walker].info, parameter[name[src_fs]]]
for taget[tuple[[<ast.Name object at 0x7da1b16bf850>, <ast.Name object at 0x7da1b16be9b0>]]] in starred[name[gen_walk]] begin[:]
variable[tar_name] assign[=] call[name[relpath], parameter[name[path]]]
if <ast.UnaryOp object at 0x7da1b16bce80> begin[:]
variable[tar_name] assign[=] call[name[tar_name].encode, parameter[name[encoding], constant[replace]]]
variable[tar_info] assign[=] call[name[tarfile].TarInfo, parameter[name[tar_name]]]
if call[name[info].has_namespace, parameter[constant[stat]]] begin[:]
variable[mtime] assign[=] call[name[info].get, parameter[constant[stat], constant[st_mtime], name[current_time]]]
if call[name[isinstance], parameter[name[mtime], name[datetime]]] begin[:]
variable[mtime] assign[=] call[name[datetime_to_epoch], parameter[name[mtime]]]
if call[name[isinstance], parameter[name[mtime], name[float]]] begin[:]
variable[mtime] assign[=] call[name[int], parameter[name[mtime]]]
name[tar_info].mtime assign[=] name[mtime]
for taget[tuple[[<ast.Name object at 0x7da1b16bd540>, <ast.Name object at 0x7da1b16bf280>]]] in starred[name[tar_attr]] begin[:]
if compare[call[name[getattr], parameter[name[info], name[infoattr], constant[None]]] is_not constant[None]] begin[:]
call[name[setattr], parameter[name[tar_info], name[tarattr], call[name[getattr], parameter[name[info], name[infoattr], constant[None]]]]]
if call[name[info].has_namespace, parameter[constant[access]]] begin[:]
name[tar_info].mode assign[=] call[name[getattr], parameter[name[info].permissions, constant[mode], constant[272]]]
if name[info].is_dir begin[:]
name[tar_info].type assign[=] name[tarfile].DIRTYPE
call[name[_tar].addfile, parameter[name[tar_info]]]
|
keyword[def] identifier[write_tar] (
identifier[src_fs] ,
identifier[file] ,
identifier[compression] = keyword[None] ,
identifier[encoding] = literal[string] ,
identifier[walker] = keyword[None] ,
):
literal[string]
identifier[type_map] ={
identifier[ResourceType] . identifier[block_special_file] : identifier[tarfile] . identifier[BLKTYPE] ,
identifier[ResourceType] . identifier[character] : identifier[tarfile] . identifier[CHRTYPE] ,
identifier[ResourceType] . identifier[directory] : identifier[tarfile] . identifier[DIRTYPE] ,
identifier[ResourceType] . identifier[fifo] : identifier[tarfile] . identifier[FIFOTYPE] ,
identifier[ResourceType] . identifier[file] : identifier[tarfile] . identifier[REGTYPE] ,
identifier[ResourceType] . identifier[socket] : identifier[tarfile] . identifier[AREGTYPE] ,
identifier[ResourceType] . identifier[symlink] : identifier[tarfile] . identifier[SYMTYPE] ,
identifier[ResourceType] . identifier[unknown] : identifier[tarfile] . identifier[AREGTYPE] ,
}
identifier[tar_attr] =[( literal[string] , literal[string] ),( literal[string] , literal[string] ),( literal[string] , literal[string] ),( literal[string] , literal[string] )]
identifier[mode] = literal[string] . identifier[format] ( identifier[compression] keyword[or] literal[string] )
keyword[if] identifier[isinstance] ( identifier[file] ,( identifier[six] . identifier[text_type] , identifier[six] . identifier[binary_type] )):
identifier[_tar] = identifier[tarfile] . identifier[open] ( identifier[file] , identifier[mode] = identifier[mode] )
keyword[else] :
identifier[_tar] = identifier[tarfile] . identifier[open] ( identifier[fileobj] = identifier[file] , identifier[mode] = identifier[mode] )
identifier[current_time] = identifier[time] . identifier[time] ()
identifier[walker] = identifier[walker] keyword[or] identifier[Walker] ()
keyword[with] identifier[_tar] :
identifier[gen_walk] = identifier[walker] . identifier[info] ( identifier[src_fs] , identifier[namespaces] =[ literal[string] , literal[string] , literal[string] ])
keyword[for] identifier[path] , identifier[info] keyword[in] identifier[gen_walk] :
identifier[tar_name] = identifier[relpath] ( identifier[path] )
keyword[if] keyword[not] identifier[six] . identifier[PY3] :
identifier[tar_name] = identifier[tar_name] . identifier[encode] ( identifier[encoding] , literal[string] )
identifier[tar_info] = identifier[tarfile] . identifier[TarInfo] ( identifier[tar_name] )
keyword[if] identifier[info] . identifier[has_namespace] ( literal[string] ):
identifier[mtime] = identifier[info] . identifier[get] ( literal[string] , literal[string] , identifier[current_time] )
keyword[else] :
identifier[mtime] = identifier[info] . identifier[modified] keyword[or] identifier[current_time]
keyword[if] identifier[isinstance] ( identifier[mtime] , identifier[datetime] ):
identifier[mtime] = identifier[datetime_to_epoch] ( identifier[mtime] )
keyword[if] identifier[isinstance] ( identifier[mtime] , identifier[float] ):
identifier[mtime] = identifier[int] ( identifier[mtime] )
identifier[tar_info] . identifier[mtime] = identifier[mtime]
keyword[for] identifier[tarattr] , identifier[infoattr] keyword[in] identifier[tar_attr] :
keyword[if] identifier[getattr] ( identifier[info] , identifier[infoattr] , keyword[None] ) keyword[is] keyword[not] keyword[None] :
identifier[setattr] ( identifier[tar_info] , identifier[tarattr] , identifier[getattr] ( identifier[info] , identifier[infoattr] , keyword[None] ))
keyword[if] identifier[info] . identifier[has_namespace] ( literal[string] ):
identifier[tar_info] . identifier[mode] = identifier[getattr] ( identifier[info] . identifier[permissions] , literal[string] , literal[int] )
keyword[if] identifier[info] . identifier[is_dir] :
identifier[tar_info] . identifier[type] = identifier[tarfile] . identifier[DIRTYPE]
identifier[_tar] . identifier[addfile] ( identifier[tar_info] )
keyword[else] :
identifier[tar_info] . identifier[type] = identifier[type_map] . identifier[get] ( identifier[info] . identifier[type] , identifier[tarfile] . identifier[REGTYPE] )
identifier[tar_info] . identifier[size] = identifier[info] . identifier[size]
keyword[with] identifier[src_fs] . identifier[openbin] ( identifier[path] ) keyword[as] identifier[bin_file] :
identifier[_tar] . identifier[addfile] ( identifier[tar_info] , identifier[bin_file] )
|
def write_tar(src_fs, file, compression=None, encoding='utf-8', walker=None): # type: FS
# type: Union[Text, BinaryIO]
# type: Optional[Text]
# type: Text
# type: Optional[Walker]
# type: (...) -> None
'Write the contents of a filesystem to a tar file.\n\n Arguments:\n file (str or io.IOBase): Destination file, may be a file\n name or an open file object.\n compression (str, optional): Compression to use, or `None`\n for a plain Tar archive without compression.\n encoding(str): The encoding to use for filenames. The\n default is ``"utf-8"``.\n walker (~fs.walk.Walker, optional): A `Walker` instance, or\n `None` to use default walker. You can use this to specify\n which files you want to compress.\n\n ' # no type for socket
# no type for unknown
type_map = {ResourceType.block_special_file: tarfile.BLKTYPE, ResourceType.character: tarfile.CHRTYPE, ResourceType.directory: tarfile.DIRTYPE, ResourceType.fifo: tarfile.FIFOTYPE, ResourceType.file: tarfile.REGTYPE, ResourceType.socket: tarfile.AREGTYPE, ResourceType.symlink: tarfile.SYMTYPE, ResourceType.unknown: tarfile.AREGTYPE}
tar_attr = [('uid', 'uid'), ('gid', 'gid'), ('uname', 'user'), ('gname', 'group')]
mode = 'w:{}'.format(compression or '')
if isinstance(file, (six.text_type, six.binary_type)):
_tar = tarfile.open(file, mode=mode) # depends on [control=['if'], data=[]]
else:
_tar = tarfile.open(fileobj=file, mode=mode)
current_time = time.time()
walker = walker or Walker()
with _tar:
gen_walk = walker.info(src_fs, namespaces=['details', 'stat', 'access'])
for (path, info) in gen_walk:
# Tar names must be relative
tar_name = relpath(path)
if not six.PY3:
# Python2 expects bytes filenames
tar_name = tar_name.encode(encoding, 'replace') # depends on [control=['if'], data=[]]
tar_info = tarfile.TarInfo(tar_name)
if info.has_namespace('stat'):
mtime = info.get('stat', 'st_mtime', current_time) # depends on [control=['if'], data=[]]
else:
mtime = info.modified or current_time
if isinstance(mtime, datetime):
mtime = datetime_to_epoch(mtime) # depends on [control=['if'], data=[]]
if isinstance(mtime, float):
mtime = int(mtime) # depends on [control=['if'], data=[]]
tar_info.mtime = mtime
for (tarattr, infoattr) in tar_attr:
if getattr(info, infoattr, None) is not None:
setattr(tar_info, tarattr, getattr(info, infoattr, None)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if info.has_namespace('access'):
tar_info.mode = getattr(info.permissions, 'mode', 272) # depends on [control=['if'], data=[]]
if info.is_dir:
tar_info.type = tarfile.DIRTYPE
_tar.addfile(tar_info) # depends on [control=['if'], data=[]]
else:
tar_info.type = type_map.get(info.type, tarfile.REGTYPE)
tar_info.size = info.size
with src_fs.openbin(path) as bin_file:
_tar.addfile(tar_info, bin_file) # depends on [control=['with'], data=['bin_file']] # depends on [control=['for'], data=[]] # depends on [control=['with'], data=[]]
|
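A likely usage sketch, assuming the PyFilesystem2 API this helper appears to belong to (fs.open_fs and the paths below are illustrative):

# Sketch: archive a directory tree into a gzip-compressed tar, then into an open file object.
import fs  # PyFilesystem2, assumed to be installed

src_fs = fs.open_fs("/path/to/project")           # any FS object can serve as the source
write_tar(src_fs, "project-backup.tar.gz", compression="gz")

with open("project-backup.tar", "wb") as handle:  # open binary files are accepted too
    write_tar(src_fs, handle)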
def _correct(token, term_freq):
"""
Correct a single token according to the term_freq
"""
if token.lower() in term_freq:
return token
e1 = [t for t in _ed1(token) if t in term_freq]
if len(e1) > 0:
e1.sort(key=term_freq.get)
return e1[0]
e2 = [t for t in _ed2(token) if t in term_freq]
if len(e2) > 0:
e2.sort(key=term_freq.get)
return e2[0]
return token
|
def function[_correct, parameter[token, term_freq]]:
constant[
Correct a single token according to the term_freq
]
if compare[call[name[token].lower, parameter[]] in name[term_freq]] begin[:]
return[name[token]]
variable[e1] assign[=] <ast.ListComp object at 0x7da18f00d990>
if compare[call[name[len], parameter[name[e1]]] greater[>] constant[0]] begin[:]
call[name[e1].sort, parameter[]]
return[call[name[e1]][constant[0]]]
variable[e2] assign[=] <ast.ListComp object at 0x7da20c76d3c0>
if compare[call[name[len], parameter[name[e2]]] greater[>] constant[0]] begin[:]
call[name[e2].sort, parameter[]]
return[call[name[e2]][constant[0]]]
return[name[token]]
|
keyword[def] identifier[_correct] ( identifier[token] , identifier[term_freq] ):
literal[string]
keyword[if] identifier[token] . identifier[lower] () keyword[in] identifier[term_freq] :
keyword[return] identifier[token]
identifier[e1] =[ identifier[t] keyword[for] identifier[t] keyword[in] identifier[_ed1] ( identifier[token] ) keyword[if] identifier[t] keyword[in] identifier[term_freq] ]
keyword[if] identifier[len] ( identifier[e1] )> literal[int] :
identifier[e1] . identifier[sort] ( identifier[key] = identifier[term_freq] . identifier[get] )
keyword[return] identifier[e1] [ literal[int] ]
identifier[e2] =[ identifier[t] keyword[for] identifier[t] keyword[in] identifier[_ed2] ( identifier[token] ) keyword[if] identifier[t] keyword[in] identifier[term_freq] ]
keyword[if] identifier[len] ( identifier[e2] )> literal[int] :
identifier[e2] . identifier[sort] ( identifier[key] = identifier[term_freq] . identifier[get] )
keyword[return] identifier[e2] [ literal[int] ]
keyword[return] identifier[token]
|
def _correct(token, term_freq):
"""
Correct a single token according to the term_freq
"""
if token.lower() in term_freq:
return token # depends on [control=['if'], data=[]]
e1 = [t for t in _ed1(token) if t in term_freq]
if len(e1) > 0:
e1.sort(key=term_freq.get)
return e1[0] # depends on [control=['if'], data=[]]
e2 = [t for t in _ed2(token) if t in term_freq]
if len(e2) > 0:
e2.sort(key=term_freq.get)
return e2[0] # depends on [control=['if'], data=[]]
return token
|
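_ed1 and _ed2 are not shown in this row; presumably they generate the edit-distance-1 and edit-distance-2 candidates of a token, roughly as sketched below (the helper name and the sample frequencies are illustrative):

# Rough sketch of an edit-distance-1 candidate generator such as _ed1 might be.
import string

def edit_distance_1(token):
    letters = string.ascii_lowercase
    splits = [(token[:i], token[i:]) for i in range(len(token) + 1)]
    deletes = [left + right[1:] for left, right in splits if right]
    replaces = [left + c + right[1:] for left, right in splits if right for c in letters]
    inserts = [left + c + right for left, right in splits for c in letters]
    transposes = [left + right[1] + right[0] + right[2:]
                  for left, right in splits if len(right) > 1]
    return set(deletes + replaces + inserts + transposes)

term_freq = {"spelling": 12}  # illustrative frequency table
print([t for t in edit_distance_1("speling") if t in term_freq])  # ['spelling']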
def start_list(self):
"""Start a list."""
self._ordered = False
self.start_container(List)
self.set_next_paragraph_style('list-paragraph'
if self._item_level <= 0
else 'sublist-paragraph')
|
def function[start_list, parameter[self]]:
constant[Start a list.]
name[self]._ordered assign[=] constant[False]
call[name[self].start_container, parameter[name[List]]]
call[name[self].set_next_paragraph_style, parameter[<ast.IfExp object at 0x7da1b0579a50>]]
|
keyword[def] identifier[start_list] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_ordered] = keyword[False]
identifier[self] . identifier[start_container] ( identifier[List] )
identifier[self] . identifier[set_next_paragraph_style] ( literal[string]
keyword[if] identifier[self] . identifier[_item_level] <= literal[int]
keyword[else] literal[string] )
|
def start_list(self):
"""Start a list."""
self._ordered = False
self.start_container(List)
self.set_next_paragraph_style('list-paragraph' if self._item_level <= 0 else 'sublist-paragraph')
|
def SolveServiceArea(self, facilities=None,
barriers=None,
polylineBarriers=None,
polygonBarriers=None,
attributeParameterValues=None,
defaultBreaks=None,
excludeSourcesFromPolygons=None,
mergeSimilarPolygonRanges=None,
outputLines=None,
outputPolygons=None,
overlapLines=None,
overlapPolygons=None,
splitLinesAtBreaks=None,
splitPolygonsAtBreaks=None,
travelDirection=None,
trimOuterPolygon=None,
trimPolygonDistance=None,
trimPolygonDistanceUnits=None,
accumulateAttributeNames=None,
impedanceAttributeName=None,
restrictionAttributeNames=None,
restrictUTurns=None,
outputGeometryPrecision=None,
outputGeometryPrecisionUnits=None):
"""The solve operation is performed on a network layer resource of type
service area (layerType is esriNAServerServiceArea)."""
raise NotImplementedError()
|
def function[SolveServiceArea, parameter[self, facilities, barriers, polylineBarriers, polygonBarriers, attributeParameterValues, defaultBreaks, excludeSourcesFromPolygons, mergeSimilarPolygonRanges, outputLines, outputPolygons, overlapLines, overlapPolygons, splitLinesAtBreaks, splitPolygonsAtBreaks, travelDirection, trimOuterPolygon, trimPolygonDistance, trimPolygonDistanceUnits, accumulateAttributeNames, impedanceAttributeName, restrictionAttributeNames, restrictUTurns, outputGeometryPrecision, outputGeometryPrecisionUnits]]:
constant[The solve operation is performed on a network layer resource of type
service area (layerType is esriNAServerServiceArea).]
<ast.Raise object at 0x7da20c6abb80>
|
keyword[def] identifier[SolveServiceArea] ( identifier[self] , identifier[facilities] = keyword[None] ,
identifier[barriers] = keyword[None] ,
identifier[polylineBarriers] = keyword[None] ,
identifier[polygonBarriers] = keyword[None] ,
identifier[attributeParameterValues] = keyword[None] ,
identifier[defaultBreaks] = keyword[None] ,
identifier[excludeSourcesFromPolygons] = keyword[None] ,
identifier[mergeSimilarPolygonRanges] = keyword[None] ,
identifier[outputLines] = keyword[None] ,
identifier[outputPolygons] = keyword[None] ,
identifier[overlapLines] = keyword[None] ,
identifier[overlapPolygons] = keyword[None] ,
identifier[splitLinesAtBreaks] = keyword[None] ,
identifier[splitPolygonsAtBreaks] = keyword[None] ,
identifier[travelDirection] = keyword[None] ,
identifier[trimOuterPolygon] = keyword[None] ,
identifier[trimPolygonDistance] = keyword[None] ,
identifier[trimPolygonDistanceUnits] = keyword[None] ,
identifier[accumulateAttributeNames] = keyword[None] ,
identifier[impedanceAttributeName] = keyword[None] ,
identifier[restrictionAttributeNames] = keyword[None] ,
identifier[restrictUTurns] = keyword[None] ,
identifier[outputGeometryPrecision] = keyword[None] ,
identifier[outputGeometryPrecisionUnits] = keyword[None] ):
literal[string]
keyword[raise] identifier[NotImplementedError] ()
|
def SolveServiceArea(self, facilities=None, barriers=None, polylineBarriers=None, polygonBarriers=None, attributeParameterValues=None, defaultBreaks=None, excludeSourcesFromPolygons=None, mergeSimilarPolygonRanges=None, outputLines=None, outputPolygons=None, overlapLines=None, overlapPolygons=None, splitLinesAtBreaks=None, splitPolygonsAtBreaks=None, travelDirection=None, trimOuterPolygon=None, trimPolygonDistance=None, trimPolygonDistanceUnits=None, accumulateAttributeNames=None, impedanceAttributeName=None, restrictionAttributeNames=None, restrictUTurns=None, outputGeometryPrecision=None, outputGeometryPrecisionUnits=None):
"""The solve operation is performed on a network layer resource of type
service area (layerType is esriNAServerServiceArea)."""
raise NotImplementedError()
|
def profile_list(self, provider, lookup='all'):
'''
Return a mapping of all configured profiles
'''
data = {}
lookups = self.lookup_profiles(provider, lookup)
if not lookups:
return data
for alias, driver in lookups:
if alias not in data:
data[alias] = {}
if driver not in data[alias]:
data[alias][driver] = {}
return data
|
def function[profile_list, parameter[self, provider, lookup]]:
constant[
Return a mapping of all configured profiles
]
variable[data] assign[=] dictionary[[], []]
variable[lookups] assign[=] call[name[self].lookup_profiles, parameter[name[provider], name[lookup]]]
if <ast.UnaryOp object at 0x7da1b2035de0> begin[:]
return[name[data]]
for taget[tuple[[<ast.Name object at 0x7da1b2034c40>, <ast.Name object at 0x7da1b2037c10>]]] in starred[name[lookups]] begin[:]
if compare[name[alias] <ast.NotIn object at 0x7da2590d7190> name[data]] begin[:]
call[name[data]][name[alias]] assign[=] dictionary[[], []]
if compare[name[driver] <ast.NotIn object at 0x7da2590d7190> call[name[data]][name[alias]]] begin[:]
call[call[name[data]][name[alias]]][name[driver]] assign[=] dictionary[[], []]
return[name[data]]
|
keyword[def] identifier[profile_list] ( identifier[self] , identifier[provider] , identifier[lookup] = literal[string] ):
literal[string]
identifier[data] ={}
identifier[lookups] = identifier[self] . identifier[lookup_profiles] ( identifier[provider] , identifier[lookup] )
keyword[if] keyword[not] identifier[lookups] :
keyword[return] identifier[data]
keyword[for] identifier[alias] , identifier[driver] keyword[in] identifier[lookups] :
keyword[if] identifier[alias] keyword[not] keyword[in] identifier[data] :
identifier[data] [ identifier[alias] ]={}
keyword[if] identifier[driver] keyword[not] keyword[in] identifier[data] [ identifier[alias] ]:
identifier[data] [ identifier[alias] ][ identifier[driver] ]={}
keyword[return] identifier[data]
|
def profile_list(self, provider, lookup='all'):
"""
Return a mapping of all configured profiles
"""
data = {}
lookups = self.lookup_profiles(provider, lookup)
if not lookups:
return data # depends on [control=['if'], data=[]]
for (alias, driver) in lookups:
if alias not in data:
data[alias] = {} # depends on [control=['if'], data=['alias', 'data']]
if driver not in data[alias]:
data[alias][driver] = {} # depends on [control=['if'], data=['driver']] # depends on [control=['for'], data=[]]
return data
|
def send_reset_password_email(self, user, user_email):
"""Send the 'reset password' email."""
# Verify config settings
if not self.user_manager.USER_ENABLE_EMAIL: return
assert self.user_manager.USER_ENABLE_FORGOT_PASSWORD
# The reset_password email is sent to a specific user_email.email or user.email
email = user_email.email if user_email else user.email
# Generate a reset_password_link
token = self.user_manager.generate_token(user.id)
reset_password_link = url_for('user.reset_password', token=token, _external=True)
# Render email from templates and send it via the configured EmailAdapter
self._render_and_send_email(
email,
user,
self.user_manager.USER_RESET_PASSWORD_EMAIL_TEMPLATE,
reset_password_link=reset_password_link,
)
|
def function[send_reset_password_email, parameter[self, user, user_email]]:
constant[Send the 'reset password' email.]
if <ast.UnaryOp object at 0x7da1b1e8cbe0> begin[:]
return[None]
assert[name[self].user_manager.USER_ENABLE_FORGOT_PASSWORD]
variable[email] assign[=] <ast.IfExp object at 0x7da1b1e8fee0>
variable[token] assign[=] call[name[self].user_manager.generate_token, parameter[name[user].id]]
variable[reset_password_link] assign[=] call[name[url_for], parameter[constant[user.reset_password]]]
call[name[self]._render_and_send_email, parameter[name[email], name[user], name[self].user_manager.USER_RESET_PASSWORD_EMAIL_TEMPLATE]]
|
keyword[def] identifier[send_reset_password_email] ( identifier[self] , identifier[user] , identifier[user_email] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[user_manager] . identifier[USER_ENABLE_EMAIL] : keyword[return]
keyword[assert] identifier[self] . identifier[user_manager] . identifier[USER_ENABLE_FORGOT_PASSWORD]
identifier[email] = identifier[user_email] . identifier[email] keyword[if] identifier[user_email] keyword[else] identifier[user] . identifier[email]
identifier[token] = identifier[self] . identifier[user_manager] . identifier[generate_token] ( identifier[user] . identifier[id] )
identifier[reset_password_link] = identifier[url_for] ( literal[string] , identifier[token] = identifier[token] , identifier[_external] = keyword[True] )
identifier[self] . identifier[_render_and_send_email] (
identifier[email] ,
identifier[user] ,
identifier[self] . identifier[user_manager] . identifier[USER_RESET_PASSWORD_EMAIL_TEMPLATE] ,
identifier[reset_password_link] = identifier[reset_password_link] ,
)
|
def send_reset_password_email(self, user, user_email):
"""Send the 'reset password' email."""
# Verify config settings
if not self.user_manager.USER_ENABLE_EMAIL:
return # depends on [control=['if'], data=[]]
assert self.user_manager.USER_ENABLE_FORGOT_PASSWORD
# The reset_password email is sent to a specific user_email.email or user.email
email = user_email.email if user_email else user.email
# Generate a reset_password_link
token = self.user_manager.generate_token(user.id)
reset_password_link = url_for('user.reset_password', token=token, _external=True)
# Render email from templates and send it via the configured EmailAdapter
self._render_and_send_email(email, user, self.user_manager.USER_RESET_PASSWORD_EMAIL_TEMPLATE, reset_password_link=reset_password_link)
|
def moving_average(data, periods, type='simple'):
"""
compute a <periods> period moving average.
type is 'simple' | 'exponential'
"""
data = np.asarray(data)
if type == 'simple':
weights = np.ones(periods)
else:
weights = np.exp(np.linspace(-1., 0., periods))
weights /= weights.sum()
mavg = np.convolve(data, weights, mode='full')[:len(data)]
mavg[:periods] = mavg[periods]
return mavg
|
def function[moving_average, parameter[data, periods, type]]:
constant[
compute a <periods> period moving average.
type is 'simple' | 'exponential'
]
variable[data] assign[=] call[name[np].asarray, parameter[name[data]]]
if compare[name[type] equal[==] constant[simple]] begin[:]
variable[weights] assign[=] call[name[np].ones, parameter[name[periods]]]
<ast.AugAssign object at 0x7da1b0efbe20>
variable[mavg] assign[=] call[call[name[np].convolve, parameter[name[data], name[weights]]]][<ast.Slice object at 0x7da1b0ef9ae0>]
call[name[mavg]][<ast.Slice object at 0x7da1b0efa920>] assign[=] call[name[mavg]][name[periods]]
return[name[mavg]]
|
keyword[def] identifier[moving_average] ( identifier[data] , identifier[periods] , identifier[type] = literal[string] ):
literal[string]
identifier[data] = identifier[np] . identifier[asarray] ( identifier[data] )
keyword[if] identifier[type] == literal[string] :
identifier[weights] = identifier[np] . identifier[ones] ( identifier[periods] )
keyword[else] :
identifier[weights] = identifier[np] . identifier[exp] ( identifier[np] . identifier[linspace] (- literal[int] , literal[int] , identifier[periods] ))
identifier[weights] /= identifier[weights] . identifier[sum] ()
identifier[mavg] = identifier[np] . identifier[convolve] ( identifier[data] , identifier[weights] , identifier[mode] = literal[string] )[: identifier[len] ( identifier[data] )]
identifier[mavg] [: identifier[periods] ]= identifier[mavg] [ identifier[periods] ]
keyword[return] identifier[mavg]
|
def moving_average(data, periods, type='simple'):
"""
compute a <periods> period moving average.
type is 'simple' | 'exponential'
"""
data = np.asarray(data)
if type == 'simple':
weights = np.ones(periods) # depends on [control=['if'], data=[]]
else:
weights = np.exp(np.linspace(-1.0, 0.0, periods))
weights /= weights.sum()
mavg = np.convolve(data, weights, mode='full')[:len(data)]
mavg[:periods] = mavg[periods]
return mavg
|
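A quick usage sketch of the helper above (numpy required; the sample series is arbitrary):

# Smooth a short price series with 3-period simple and exponential windows.
import numpy as np

prices = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]
print(moving_average(prices, 3))                      # equal weights within the window
print(moving_average(prices, 3, type='exponential'))  # recent samples weighted more heavily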
def getnames():
"""
get mail names
"""
namestring = ""
addmore = 1
while addmore:
scientist = input("Enter name - <Return> when done ")
if scientist != "":
namestring = namestring + ":" + scientist
else:
namestring = namestring[1:]
addmore = 0
return namestring
|
def function[getnames, parameter[]]:
constant[
get mail names
]
variable[namestring] assign[=] constant[]
variable[addmore] assign[=] constant[1]
while name[addmore] begin[:]
variable[scientist] assign[=] call[name[input], parameter[constant[Enter name - <Return> when done ]]]
if compare[name[scientist] not_equal[!=] constant[]] begin[:]
variable[namestring] assign[=] binary_operation[binary_operation[name[namestring] + constant[:]] + name[scientist]]
return[name[namestring]]
|
keyword[def] identifier[getnames] ():
literal[string]
identifier[namestring] = literal[string]
identifier[addmore] = literal[int]
keyword[while] identifier[addmore] :
identifier[scientist] = identifier[input] ( literal[string] )
keyword[if] identifier[scientist] != literal[string] :
identifier[namestring] = identifier[namestring] + literal[string] + identifier[scientist]
keyword[else] :
identifier[namestring] = identifier[namestring] [ literal[int] :]
identifier[addmore] = literal[int]
keyword[return] identifier[namestring]
|
def getnames():
"""
get mail names
"""
namestring = ''
addmore = 1
while addmore:
scientist = input('Enter name - <Return> when done ')
if scientist != '':
namestring = namestring + ':' + scientist # depends on [control=['if'], data=['scientist']]
else:
namestring = namestring[1:]
addmore = 0 # depends on [control=['while'], data=[]]
return namestring
|
def configGet(self, vartype, category, name, optional=False, specialReturnMessage=None):
"""
Wraps a try / except and a check for self.filename around ConfigParser
as it talks to the configuration file.
Also, checks for existence of configuration file so this won't execute (and fail)
when no configuration file is provided (e.g., running in coupled mode with CSDMS
entirely with getters and setters)
vartype can be 'float', 'str' or 'string' (str and string are the same),
or 'int' or 'integer' (also the same).
"Optional" determines whether or not the program will exit if the variable
fails to load. Set it to "True" if you don't want it to exit. In this case,
the variable will be set to "None". Otherwise, it defaults to "False".
"specialReturnMessage" is something that you would like to add at the end
of a failure to execute message. By default it does not print.
"""
try:
if vartype == 'float':
var = self.config.getfloat(category, name)
elif vartype == 'string' or vartype == 'str':
var = self.config.get(category, name)
if var == "" and optional == False:
# but "" is acceptable for boundary conditions
if name[:17] != 'BoundaryCondition':
if self.Quiet != True:
print("An empty input string here is not an acceptable option.")
print(name, "is not optional.")
print("Program crash likely to occur.")
elif vartype == 'integer' or vartype == 'int':
var = self.config.getint(category, name)
elif vartype == 'boolean' or vartype == 'bool':
var = self.config.getboolean(category, name)
else:
print("Please enter 'float', 'string' (or 'str'), 'integer' (or 'int'), or 'boolean (or 'bool') for vartype")
sys.exit() # Won't exit, but will lead to exception
return var
except:
if optional:
# Carry on if the variable is optional
var = None
if self.Verbose or self.Debug:
if self.grass == False:
print("")
print('No value entered for optional parameter "' + name + '"')
print('in category "' + category + '" in configuration file.')
print("No action related to this optional parameter will be taken.")
print("")
else:
print('Problem loading ' + vartype + ' "' + name + '" in category "' + category + '" from configuration file.')
if specialReturnMessage:
print(specialReturnMessage)
sys.exit("Exiting.")
|
def function[configGet, parameter[self, vartype, category, name, optional, specialReturnMessage]]:
constant[
Wraps a try / except and a check for self.filename around ConfigParser
as it talks to the configuration file.
Also, checks for existence of configuration file so this won't execute (and fail)
when no configuration file is provided (e.g., running in coupled mode with CSDMS
entirely with getters and setters)
vartype can be 'float', 'str' or 'string' (str and string are the same),
or 'int' or 'integer' (also the same).
"Optional" determines whether or not the program will exit if the variable
fails to load. Set it to "True" if you don't want it to exit. In this case,
the variable will be set to "None". Otherwise, it defaults to "False".
"specialReturnMessage" is something that you would like to add at the end
of a failure to execute message. By default it does not print.
]
<ast.Try object at 0x7da1b2566b60>
|
keyword[def] identifier[configGet] ( identifier[self] , identifier[vartype] , identifier[category] , identifier[name] , identifier[optional] = keyword[False] , identifier[specialReturnMessage] = keyword[None] ):
literal[string]
keyword[try] :
keyword[if] identifier[vartype] == literal[string] :
identifier[var] = identifier[self] . identifier[config] . identifier[getfloat] ( identifier[category] , identifier[name] )
keyword[elif] identifier[vartype] == literal[string] keyword[or] identifier[vartype] == literal[string] :
identifier[var] = identifier[self] . identifier[config] . identifier[get] ( identifier[category] , identifier[name] )
keyword[if] identifier[var] == literal[string] keyword[and] identifier[optional] == keyword[False] :
keyword[if] identifier[name] [: literal[int] ]!= literal[string] :
keyword[if] identifier[self] . identifier[Quiet] != keyword[True] :
identifier[print] ( literal[string] )
identifier[print] ( identifier[name] , literal[string] )
identifier[print] ( literal[string] )
keyword[elif] identifier[vartype] == literal[string] keyword[or] identifier[vartype] == literal[string] :
identifier[var] = identifier[self] . identifier[config] . identifier[getint] ( identifier[category] , identifier[name] )
keyword[elif] identifier[vartype] == literal[string] keyword[or] identifier[vartype] == literal[string] :
identifier[var] = identifier[self] . identifier[config] . identifier[getboolean] ( identifier[category] , identifier[name] )
keyword[else] :
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ()
keyword[return] identifier[var]
keyword[except] :
keyword[if] identifier[optional] :
identifier[var] = keyword[None]
keyword[if] identifier[self] . identifier[Verbose] keyword[or] identifier[self] . identifier[Debug] :
keyword[if] identifier[self] . identifier[grass] == keyword[False] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] + identifier[name] + literal[string] )
identifier[print] ( literal[string] + identifier[category] + literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
keyword[else] :
identifier[print] ( literal[string] + identifier[vartype] + literal[string] + identifier[name] + literal[string] + identifier[category] + literal[string] )
keyword[if] identifier[specialReturnMessage] :
identifier[print] ( identifier[specialReturnMessage] )
identifier[sys] . identifier[exit] ( literal[string] )
|
def configGet(self, vartype, category, name, optional=False, specialReturnMessage=None):
"""
Wraps a try / except and a check for self.filename around ConfigParser
as it talks to the configuration file.
Also, checks for existence of configuration file so this won't execute (and fail)
when no configuration file is provided (e.g., running in coupled mode with CSDMS
entirely with getters and setters)
vartype can be 'float', 'str' or 'string' (str and string are the same),
or 'int' or 'integer' (also the same).
"Optional" determines whether or not the program will exit if the variable
fails to load. Set it to "True" if you don't want it to exit. In this case,
the variable will be set to "None". Otherwise, it defaults to "False".
"specialReturnMessage" is something that you would like to add at the end
of a failure to execute message. By default it does not print.
"""
try:
if vartype == 'float':
var = self.config.getfloat(category, name) # depends on [control=['if'], data=[]]
elif vartype == 'string' or vartype == 'str':
var = self.config.get(category, name)
if var == '' and optional == False:
# but "" is acceptable for boundary conditions
if name[:17] != 'BoundaryCondition':
if self.Quiet != True:
print('An empty input string here is not an acceptable option.')
print(name, 'is not optional.')
print('Program crash likely to occur.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif vartype == 'integer' or vartype == 'int':
var = self.config.getint(category, name) # depends on [control=['if'], data=[]]
elif vartype == 'boolean' or vartype == 'bool':
var = self.config.getboolean(category, name) # depends on [control=['if'], data=[]]
else:
print("Please enter 'float', 'string' (or 'str'), 'integer' (or 'int'), or 'boolean (or 'bool') for vartype")
sys.exit() # Won't exit, but will lead to exception
return var # depends on [control=['try'], data=[]]
except:
if optional:
# Carry on if the variable is optional
var = None
if self.Verbose or self.Debug:
if self.grass == False:
print('')
print('No value entered for optional parameter "' + name + '"')
print('in category "' + category + '" in configuration file.')
print('No action related to this optional parameter will be taken.')
print('') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
print('Problem loading ' + vartype + ' "' + name + '" in category "' + category + '" from configuration file.')
if specialReturnMessage:
print(specialReturnMessage) # depends on [control=['if'], data=[]]
sys.exit('Exiting.') # depends on [control=['except'], data=[]]
|
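configGet delegates to ConfigParser's typed getters; a rough sketch of those calls on an in-memory configuration (section and option names below are made up):

# Sketch of the underlying ConfigParser getters that configGet wraps.
from configparser import ConfigParser

config = ConfigParser()
config.read_string("""
[numerical]
dx = 100.0
iterations = 50
use_gravity = yes
""")

print(config.getfloat("numerical", "dx"))              # 100.0
print(config.getint("numerical", "iterations"))        # 50
print(config.getboolean("numerical", "use_gravity"))   # True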
def distinct(l):
"""
Return a list where the duplicates have been removed.
Args:
l (list): the list to filter.
Returns:
list: the same list without duplicates.
"""
seen = set()
seen_add = seen.add
return (_ for _ in l if not (_ in seen or seen_add(_)))
|
def function[distinct, parameter[l]]:
constant[
Return a list where the duplicates have been removed.
Args:
l (list): the list to filter.
Returns:
list: the same list without duplicates.
]
variable[seen] assign[=] call[name[set], parameter[]]
variable[seen_add] assign[=] name[seen].add
return[<ast.GeneratorExp object at 0x7da1b244f9a0>]
|
keyword[def] identifier[distinct] ( identifier[l] ):
literal[string]
identifier[seen] = identifier[set] ()
identifier[seen_add] = identifier[seen] . identifier[add]
keyword[return] ( identifier[_] keyword[for] identifier[_] keyword[in] identifier[l] keyword[if] keyword[not] ( identifier[_] keyword[in] identifier[seen] keyword[or] identifier[seen_add] ( identifier[_] )))
|
def distinct(l):
"""
Return a list where the duplicates have been removed.
Args:
l (list): the list to filter.
Returns:
list: the same list without duplicates.
"""
seen = set()
seen_add = seen.add
return (_ for _ in l if not (_ in seen or seen_add(_)))
|
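A short usage sketch; note that despite its docstring the helper above yields a lazy generator rather than a materialised list, preserving first-seen order:

# Deduplicate while keeping the original order; wrap in list() to materialise.
print(list(distinct([3, 1, 3, 2, 1])))   # [3, 1, 2]
print(list(distinct("abracadabra")))     # ['a', 'b', 'r', 'c', 'd']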
def get_elt_projected_plots(self, zero_to_efermi=True, ylim=None,
vbm_cbm_marker=False):
"""
Method returning a plot composed of subplots along different elements
Returns:
a pylab object with different subfigures for each projection
The blue and red colors are for spin up and spin down
The bigger the red or blue dot in the band structure the higher
character for the corresponding element and orbital
"""
band_linewidth = 1.0
proj = self._get_projections_by_branches({e.symbol: ['s', 'p', 'd']
for e in
self._bs.structure.composition.elements})
data = self.bs_plot_data(zero_to_efermi)
plt = pretty_plot(12, 8)
e_min = -4
e_max = 4
if self._bs.is_metal():
e_min = -10
e_max = 10
count = 1
for el in self._bs.structure.composition.elements:
plt.subplot(220 + count)
self._maketicks(plt)
for b in range(len(data['distances'])):
for i in range(self._nb_bands):
plt.plot(data['distances'][b],
[data['energy'][b][str(Spin.up)][i][j]
for j in range(len(data['distances'][b]))], '-',
color=[192 / 255, 192 / 255, 192 / 255],
linewidth=band_linewidth)
if self._bs.is_spin_polarized:
plt.plot(data['distances'][b],
[data['energy'][b][str(Spin.down)][i][j]
for j in range(len(data['distances'][b]))],
'--', color=[128 / 255, 128 / 255, 128 / 255],
linewidth=band_linewidth)
for j in range(len(data['energy'][b][str(Spin.up)][i])):
markerscale = sum([proj[b][str(Spin.down)][i][
j][str(el)][o] for o in
proj[b]
[str(Spin.down)][i][j][
str(el)]])
plt.plot(data['distances'][b][j],
data['energy'][b][str(Spin.down)][i][j],
'bo',
markersize=markerscale * 15.0,
color=[markerscale, 0.3 * markerscale,
0.4 * markerscale])
for j in range(len(data['energy'][b][str(Spin.up)][i])):
markerscale = sum(
[proj[b][str(Spin.up)][i][j][str(el)][o]
for o in proj[b]
[str(Spin.up)][i][j][str(el)]])
plt.plot(data['distances'][b][j],
data['energy'][b][str(Spin.up)][i][j], 'o',
markersize=markerscale * 15.0,
color=[markerscale, 0.3 * markerscale,
0.4 * markerscale])
if ylim is None:
if self._bs.is_metal():
if zero_to_efermi:
plt.ylim(e_min, e_max)
else:
plt.ylim(self._bs.efermi + e_min, self._bs.efermi
+ e_max)
else:
if vbm_cbm_marker:
for cbm in data['cbm']:
plt.scatter(cbm[0], cbm[1], color='r', marker='o',
s=100)
for vbm in data['vbm']:
plt.scatter(vbm[0], vbm[1], color='g', marker='o',
s=100)
plt.ylim(data['vbm'][0][1] + e_min, data['cbm'][0][1]
+ e_max)
else:
plt.ylim(ylim)
plt.title(str(el))
count += 1
return plt
|
def function[get_elt_projected_plots, parameter[self, zero_to_efermi, ylim, vbm_cbm_marker]]:
constant[
Method returning a plot composed of subplots along different elements
Returns:
a pylab object with different subfigures for each projection
The blue and red colors are for spin up and spin down
The bigger the red or blue dot in the band structure the higher
character for the corresponding element and orbital
]
variable[band_linewidth] assign[=] constant[1.0]
variable[proj] assign[=] call[name[self]._get_projections_by_branches, parameter[<ast.DictComp object at 0x7da18f58dc90>]]
variable[data] assign[=] call[name[self].bs_plot_data, parameter[name[zero_to_efermi]]]
variable[plt] assign[=] call[name[pretty_plot], parameter[constant[12], constant[8]]]
variable[e_min] assign[=] <ast.UnaryOp object at 0x7da18f58fc70>
variable[e_max] assign[=] constant[4]
if call[name[self]._bs.is_metal, parameter[]] begin[:]
variable[e_min] assign[=] <ast.UnaryOp object at 0x7da18f58c940>
variable[e_max] assign[=] constant[10]
variable[count] assign[=] constant[1]
for taget[name[el]] in starred[name[self]._bs.structure.composition.elements] begin[:]
call[name[plt].subplot, parameter[binary_operation[constant[220] + name[count]]]]
call[name[self]._maketicks, parameter[name[plt]]]
for taget[name[b]] in starred[call[name[range], parameter[call[name[len], parameter[call[name[data]][constant[distances]]]]]]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[name[self]._nb_bands]]] begin[:]
call[name[plt].plot, parameter[call[call[name[data]][constant[distances]]][name[b]], <ast.ListComp object at 0x7da18f58c310>, constant[-]]]
if name[self]._bs.is_spin_polarized begin[:]
call[name[plt].plot, parameter[call[call[name[data]][constant[distances]]][name[b]], <ast.ListComp object at 0x7da18f58e560>, constant[--]]]
for taget[name[j]] in starred[call[name[range], parameter[call[name[len], parameter[call[call[call[call[name[data]][constant[energy]]][name[b]]][call[name[str], parameter[name[Spin].up]]]][name[i]]]]]]] begin[:]
variable[markerscale] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da18f58e5c0>]]
call[name[plt].plot, parameter[call[call[call[name[data]][constant[distances]]][name[b]]][name[j]], call[call[call[call[call[name[data]][constant[energy]]][name[b]]][call[name[str], parameter[name[Spin].down]]]][name[i]]][name[j]], constant[bo]]]
for taget[name[j]] in starred[call[name[range], parameter[call[name[len], parameter[call[call[call[call[name[data]][constant[energy]]][name[b]]][call[name[str], parameter[name[Spin].up]]]][name[i]]]]]]] begin[:]
variable[markerscale] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da20c76c580>]]
call[name[plt].plot, parameter[call[call[call[name[data]][constant[distances]]][name[b]]][name[j]], call[call[call[call[call[name[data]][constant[energy]]][name[b]]][call[name[str], parameter[name[Spin].up]]]][name[i]]][name[j]], constant[o]]]
if compare[name[ylim] is constant[None]] begin[:]
if call[name[self]._bs.is_metal, parameter[]] begin[:]
if name[zero_to_efermi] begin[:]
call[name[plt].ylim, parameter[name[e_min], name[e_max]]]
call[name[plt].title, parameter[call[name[str], parameter[name[el]]]]]
<ast.AugAssign object at 0x7da20c7cb3d0>
return[name[plt]]
|
keyword[def] identifier[get_elt_projected_plots] ( identifier[self] , identifier[zero_to_efermi] = keyword[True] , identifier[ylim] = keyword[None] ,
identifier[vbm_cbm_marker] = keyword[False] ):
literal[string]
identifier[band_linewidth] = literal[int]
identifier[proj] = identifier[self] . identifier[_get_projections_by_branches] ({ identifier[e] . identifier[symbol] :[ literal[string] , literal[string] , literal[string] ]
keyword[for] identifier[e] keyword[in]
identifier[self] . identifier[_bs] . identifier[structure] . identifier[composition] . identifier[elements] })
identifier[data] = identifier[self] . identifier[bs_plot_data] ( identifier[zero_to_efermi] )
identifier[plt] = identifier[pretty_plot] ( literal[int] , literal[int] )
identifier[e_min] =- literal[int]
identifier[e_max] = literal[int]
keyword[if] identifier[self] . identifier[_bs] . identifier[is_metal] ():
identifier[e_min] =- literal[int]
identifier[e_max] = literal[int]
identifier[count] = literal[int]
keyword[for] identifier[el] keyword[in] identifier[self] . identifier[_bs] . identifier[structure] . identifier[composition] . identifier[elements] :
identifier[plt] . identifier[subplot] ( literal[int] + identifier[count] )
identifier[self] . identifier[_maketicks] ( identifier[plt] )
keyword[for] identifier[b] keyword[in] identifier[range] ( identifier[len] ( identifier[data] [ literal[string] ])):
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[_nb_bands] ):
identifier[plt] . identifier[plot] ( identifier[data] [ literal[string] ][ identifier[b] ],
[ identifier[data] [ literal[string] ][ identifier[b] ][ identifier[str] ( identifier[Spin] . identifier[up] )][ identifier[i] ][ identifier[j] ]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[data] [ literal[string] ][ identifier[b] ]))], literal[string] ,
identifier[color] =[ literal[int] / literal[int] , literal[int] / literal[int] , literal[int] / literal[int] ],
identifier[linewidth] = identifier[band_linewidth] )
keyword[if] identifier[self] . identifier[_bs] . identifier[is_spin_polarized] :
identifier[plt] . identifier[plot] ( identifier[data] [ literal[string] ][ identifier[b] ],
[ identifier[data] [ literal[string] ][ identifier[b] ][ identifier[str] ( identifier[Spin] . identifier[down] )][ identifier[i] ][ identifier[j] ]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[data] [ literal[string] ][ identifier[b] ]))],
literal[string] , identifier[color] =[ literal[int] / literal[int] , literal[int] / literal[int] , literal[int] / literal[int] ],
identifier[linewidth] = identifier[band_linewidth] )
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[data] [ literal[string] ][ identifier[b] ][ identifier[str] ( identifier[Spin] . identifier[up] )][ identifier[i] ])):
identifier[markerscale] = identifier[sum] ([ identifier[proj] [ identifier[b] ][ identifier[str] ( identifier[Spin] . identifier[down] )][ identifier[i] ][
identifier[j] ][ identifier[str] ( identifier[el] )][ identifier[o] ] keyword[for] identifier[o] keyword[in]
identifier[proj] [ identifier[b] ]
[ identifier[str] ( identifier[Spin] . identifier[down] )][ identifier[i] ][ identifier[j] ][
identifier[str] ( identifier[el] )]])
identifier[plt] . identifier[plot] ( identifier[data] [ literal[string] ][ identifier[b] ][ identifier[j] ],
identifier[data] [ literal[string] ][ identifier[b] ][ identifier[str] ( identifier[Spin] . identifier[down] )][ identifier[i] ][ identifier[j] ],
literal[string] ,
identifier[markersize] = identifier[markerscale] * literal[int] ,
identifier[color] =[ identifier[markerscale] , literal[int] * identifier[markerscale] ,
literal[int] * identifier[markerscale] ])
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[data] [ literal[string] ][ identifier[b] ][ identifier[str] ( identifier[Spin] . identifier[up] )][ identifier[i] ])):
identifier[markerscale] = identifier[sum] (
[ identifier[proj] [ identifier[b] ][ identifier[str] ( identifier[Spin] . identifier[up] )][ identifier[i] ][ identifier[j] ][ identifier[str] ( identifier[el] )][ identifier[o] ]
keyword[for] identifier[o] keyword[in] identifier[proj] [ identifier[b] ]
[ identifier[str] ( identifier[Spin] . identifier[up] )][ identifier[i] ][ identifier[j] ][ identifier[str] ( identifier[el] )]])
identifier[plt] . identifier[plot] ( identifier[data] [ literal[string] ][ identifier[b] ][ identifier[j] ],
identifier[data] [ literal[string] ][ identifier[b] ][ identifier[str] ( identifier[Spin] . identifier[up] )][ identifier[i] ][ identifier[j] ], literal[string] ,
identifier[markersize] = identifier[markerscale] * literal[int] ,
identifier[color] =[ identifier[markerscale] , literal[int] * identifier[markerscale] ,
literal[int] * identifier[markerscale] ])
keyword[if] identifier[ylim] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[_bs] . identifier[is_metal] ():
keyword[if] identifier[zero_to_efermi] :
identifier[plt] . identifier[ylim] ( identifier[e_min] , identifier[e_max] )
keyword[else] :
identifier[plt] . identifier[ylim] ( identifier[self] . identifier[_bs] . identifier[efermi] + identifier[e_min] , identifier[self] . identifier[_bs] . identifier[efermi]
+ identifier[e_max] )
keyword[else] :
keyword[if] identifier[vbm_cbm_marker] :
keyword[for] identifier[cbm] keyword[in] identifier[data] [ literal[string] ]:
identifier[plt] . identifier[scatter] ( identifier[cbm] [ literal[int] ], identifier[cbm] [ literal[int] ], identifier[color] = literal[string] , identifier[marker] = literal[string] ,
identifier[s] = literal[int] )
keyword[for] identifier[vbm] keyword[in] identifier[data] [ literal[string] ]:
identifier[plt] . identifier[scatter] ( identifier[vbm] [ literal[int] ], identifier[vbm] [ literal[int] ], identifier[color] = literal[string] , identifier[marker] = literal[string] ,
identifier[s] = literal[int] )
identifier[plt] . identifier[ylim] ( identifier[data] [ literal[string] ][ literal[int] ][ literal[int] ]+ identifier[e_min] , identifier[data] [ literal[string] ][ literal[int] ][ literal[int] ]
+ identifier[e_max] )
keyword[else] :
identifier[plt] . identifier[ylim] ( identifier[ylim] )
identifier[plt] . identifier[title] ( identifier[str] ( identifier[el] ))
identifier[count] += literal[int]
keyword[return] identifier[plt]
|
def get_elt_projected_plots(self, zero_to_efermi=True, ylim=None, vbm_cbm_marker=False):
"""
Method returning a plot composed of subplots along different elements
Returns:
a pylab object with different subfigures for each projection
The blue and red colors are for spin up and spin down
The bigger the red or blue dot in the band structure the higher
character for the corresponding element and orbital
"""
band_linewidth = 1.0
proj = self._get_projections_by_branches({e.symbol: ['s', 'p', 'd'] for e in self._bs.structure.composition.elements})
data = self.bs_plot_data(zero_to_efermi)
plt = pretty_plot(12, 8)
e_min = -4
e_max = 4
if self._bs.is_metal():
e_min = -10
e_max = 10 # depends on [control=['if'], data=[]]
count = 1
for el in self._bs.structure.composition.elements:
plt.subplot(220 + count)
self._maketicks(plt)
for b in range(len(data['distances'])):
for i in range(self._nb_bands):
plt.plot(data['distances'][b], [data['energy'][b][str(Spin.up)][i][j] for j in range(len(data['distances'][b]))], '-', color=[192 / 255, 192 / 255, 192 / 255], linewidth=band_linewidth)
if self._bs.is_spin_polarized:
plt.plot(data['distances'][b], [data['energy'][b][str(Spin.down)][i][j] for j in range(len(data['distances'][b]))], '--', color=[128 / 255, 128 / 255, 128 / 255], linewidth=band_linewidth)
for j in range(len(data['energy'][b][str(Spin.up)][i])):
markerscale = sum([proj[b][str(Spin.down)][i][j][str(el)][o] for o in proj[b][str(Spin.down)][i][j][str(el)]])
plt.plot(data['distances'][b][j], data['energy'][b][str(Spin.down)][i][j], 'bo', markersize=markerscale * 15.0, color=[markerscale, 0.3 * markerscale, 0.4 * markerscale]) # depends on [control=['for'], data=['j']] # depends on [control=['if'], data=[]]
for j in range(len(data['energy'][b][str(Spin.up)][i])):
markerscale = sum([proj[b][str(Spin.up)][i][j][str(el)][o] for o in proj[b][str(Spin.up)][i][j][str(el)]])
plt.plot(data['distances'][b][j], data['energy'][b][str(Spin.up)][i][j], 'o', markersize=markerscale * 15.0, color=[markerscale, 0.3 * markerscale, 0.4 * markerscale]) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['b']]
if ylim is None:
if self._bs.is_metal():
if zero_to_efermi:
plt.ylim(e_min, e_max) # depends on [control=['if'], data=[]]
else:
plt.ylim(self._bs.efermi + e_min, self._bs.efermi + e_max) # depends on [control=['if'], data=[]]
else:
if vbm_cbm_marker:
for cbm in data['cbm']:
plt.scatter(cbm[0], cbm[1], color='r', marker='o', s=100) # depends on [control=['for'], data=['cbm']]
for vbm in data['vbm']:
plt.scatter(vbm[0], vbm[1], color='g', marker='o', s=100) # depends on [control=['for'], data=['vbm']] # depends on [control=['if'], data=[]]
plt.ylim(data['vbm'][0][1] + e_min, data['cbm'][0][1] + e_max) # depends on [control=['if'], data=[]]
else:
plt.ylim(ylim)
plt.title(str(el))
count += 1 # depends on [control=['for'], data=['el']]
return plt
|
def experiments_fmri_get(self, resource_url):
"""Get handle for functional fMRI resource at given Url.
Parameters
----------
resource_url : string
Url for fMRI resource at SCO-API
Returns
-------
scoserv.FunctionalDataHandle
        Handle for functional MRI data resource
"""
# Get resource directory, Json representation, active flag, and cache id
obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url)
# Create image group handle. Will raise an exception if resource is not
# in cache and cannot be downloaded.
fmri_data = FunctionalDataHandle(obj_json, obj_dir)
# Add resource to cache if not exists
if not cache_id in self.cache:
self.cache_add(resource_url, cache_id)
# Return functional data handle
return fmri_data
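
A hypothetical call sketch for the method above; the client object name and URL are invented for illustration and are not part of the original source.

# Hypothetical usage (client object and URL are illustrative):
# fmri = sco_client.experiments_fmri_get('https://sco.example.org/experiments/42/fmri')
# isinstance(fmri, FunctionalDataHandle)  # True; the resource is now in the local cache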
|
def function[experiments_fmri_get, parameter[self, resource_url]]:
constant[Get handle for functional fMRI resource at given Url.
Parameters
----------
resource_url : string
Url for fMRI resource at SCO-API
Returns
-------
scoserv.FunctionalDataHandle
Handle for funcrional MRI data resource
]
<ast.Tuple object at 0x7da2044c32e0> assign[=] call[name[self].get_object, parameter[name[resource_url]]]
variable[fmri_data] assign[=] call[name[FunctionalDataHandle], parameter[name[obj_json], name[obj_dir]]]
if <ast.UnaryOp object at 0x7da2044c1cf0> begin[:]
call[name[self].cache_add, parameter[name[resource_url], name[cache_id]]]
return[name[fmri_data]]
|
keyword[def] identifier[experiments_fmri_get] ( identifier[self] , identifier[resource_url] ):
literal[string]
identifier[obj_dir] , identifier[obj_json] , identifier[is_active] , identifier[cache_id] = identifier[self] . identifier[get_object] ( identifier[resource_url] )
identifier[fmri_data] = identifier[FunctionalDataHandle] ( identifier[obj_json] , identifier[obj_dir] )
keyword[if] keyword[not] identifier[cache_id] keyword[in] identifier[self] . identifier[cache] :
identifier[self] . identifier[cache_add] ( identifier[resource_url] , identifier[cache_id] )
keyword[return] identifier[fmri_data]
|
def experiments_fmri_get(self, resource_url):
"""Get handle for functional fMRI resource at given Url.
Parameters
----------
resource_url : string
Url for fMRI resource at SCO-API
Returns
-------
scoserv.FunctionalDataHandle
Handle for funcrional MRI data resource
"""
# Get resource directory, Json representation, active flag, and cache id
(obj_dir, obj_json, is_active, cache_id) = self.get_object(resource_url)
# Create image group handle. Will raise an exception if resource is not
# in cache and cannot be downloaded.
fmri_data = FunctionalDataHandle(obj_json, obj_dir)
# Add resource to cache if not exists
if not cache_id in self.cache:
self.cache_add(resource_url, cache_id) # depends on [control=['if'], data=[]]
# Return functional data handle
return fmri_data
|
def unflatten_list(flat_dict, separator='_'):
"""
    Unflattens a dictionary, first assuming no lists exist, and then tries to
    identify lists and replace them
This is probably not very efficient and has not been tested extensively
Feel free to add test cases or rewrite the logic
Issues that stand out to me:
    - Sorting all the keys in the dictionary, which especially for the root
    dictionary can be a lot of keys
- Checking that numbers are consecutive is O(N) in number of keys
:param flat_dict: dictionary with no hierarchy
:param separator: a string that separates keys
:return: a dictionary with hierarchy
"""
_unflatten_asserts(flat_dict, separator)
# First unflatten the dictionary assuming no lists exist
unflattened_dict = unflatten(flat_dict, separator)
def _convert_dict_to_list(object_, parent_object, parent_object_key):
if isinstance(object_, dict):
for key in object_:
if isinstance(object_[key], dict):
_convert_dict_to_list(object_[key], object_, key)
try:
keys = [int(key) for key in object_]
keys.sort()
except (ValueError, TypeError):
keys = []
keys_len = len(keys)
if (keys_len > 0 and sum(keys) ==
int(((keys_len - 1) * keys_len) / 2) and keys[0] == 0 and
keys[-1] == keys_len - 1 and
check_if_numbers_are_consecutive(keys)):
# The dictionary looks like a list so we're going to replace it
parent_object[parent_object_key] = []
for key_index, key in enumerate(keys):
parent_object[parent_object_key].append(object_[str(key)])
# The list item we just added might be a list itself
# https://github.com/amirziai/flatten/issues/15
_convert_dict_to_list(parent_object[parent_object_key][-1],
parent_object[parent_object_key],
key_index)
_convert_dict_to_list(unflattened_dict, None, None)
return unflattened_dict
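
A small usage sketch of the function above, assuming the companion helpers it calls (_unflatten_asserts, unflatten, check_if_numbers_are_consecutive) are importable from the same module; the sample dictionary is made up for illustration.

# Illustrative round trip (sample data is invented):
flat = {'a_0': 1, 'a_1': 2, 'b_c': 3}
nested = unflatten_list(flat, separator='_')
# nested == {'a': [1, 2], 'b': {'c': 3}}  # consecutive numeric keys collapse into a list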
|
def function[unflatten_list, parameter[flat_dict, separator]]:
constant[
Unflattens a dictionary, first assuming no lists exist and then tries to
identify lists and replaces them
This is probably not very efficient and has not been tested extensively
Feel free to add test cases or rewrite the logic
Issues that stand out to me:
- Sorting all the keys in the dictionary, which specially for the root
dictionary can be a lot of keys
- Checking that numbers are consecutive is O(N) in number of keys
:param flat_dict: dictionary with no hierarchy
:param separator: a string that separates keys
:return: a dictionary with hierarchy
]
call[name[_unflatten_asserts], parameter[name[flat_dict], name[separator]]]
variable[unflattened_dict] assign[=] call[name[unflatten], parameter[name[flat_dict], name[separator]]]
def function[_convert_dict_to_list, parameter[object_, parent_object, parent_object_key]]:
if call[name[isinstance], parameter[name[object_], name[dict]]] begin[:]
for taget[name[key]] in starred[name[object_]] begin[:]
if call[name[isinstance], parameter[call[name[object_]][name[key]], name[dict]]] begin[:]
call[name[_convert_dict_to_list], parameter[call[name[object_]][name[key]], name[object_], name[key]]]
<ast.Try object at 0x7da1b080b0a0>
variable[keys_len] assign[=] call[name[len], parameter[name[keys]]]
if <ast.BoolOp object at 0x7da1b080bf70> begin[:]
call[name[parent_object]][name[parent_object_key]] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b080a800>, <ast.Name object at 0x7da1b080ad70>]]] in starred[call[name[enumerate], parameter[name[keys]]]] begin[:]
call[call[name[parent_object]][name[parent_object_key]].append, parameter[call[name[object_]][call[name[str], parameter[name[key]]]]]]
call[name[_convert_dict_to_list], parameter[call[call[name[parent_object]][name[parent_object_key]]][<ast.UnaryOp object at 0x7da1b080b610>], call[name[parent_object]][name[parent_object_key]], name[key_index]]]
call[name[_convert_dict_to_list], parameter[name[unflattened_dict], constant[None], constant[None]]]
return[name[unflattened_dict]]
|
keyword[def] identifier[unflatten_list] ( identifier[flat_dict] , identifier[separator] = literal[string] ):
literal[string]
identifier[_unflatten_asserts] ( identifier[flat_dict] , identifier[separator] )
identifier[unflattened_dict] = identifier[unflatten] ( identifier[flat_dict] , identifier[separator] )
keyword[def] identifier[_convert_dict_to_list] ( identifier[object_] , identifier[parent_object] , identifier[parent_object_key] ):
keyword[if] identifier[isinstance] ( identifier[object_] , identifier[dict] ):
keyword[for] identifier[key] keyword[in] identifier[object_] :
keyword[if] identifier[isinstance] ( identifier[object_] [ identifier[key] ], identifier[dict] ):
identifier[_convert_dict_to_list] ( identifier[object_] [ identifier[key] ], identifier[object_] , identifier[key] )
keyword[try] :
identifier[keys] =[ identifier[int] ( identifier[key] ) keyword[for] identifier[key] keyword[in] identifier[object_] ]
identifier[keys] . identifier[sort] ()
keyword[except] ( identifier[ValueError] , identifier[TypeError] ):
identifier[keys] =[]
identifier[keys_len] = identifier[len] ( identifier[keys] )
keyword[if] ( identifier[keys_len] > literal[int] keyword[and] identifier[sum] ( identifier[keys] )==
identifier[int] ((( identifier[keys_len] - literal[int] )* identifier[keys_len] )/ literal[int] ) keyword[and] identifier[keys] [ literal[int] ]== literal[int] keyword[and]
identifier[keys] [- literal[int] ]== identifier[keys_len] - literal[int] keyword[and]
identifier[check_if_numbers_are_consecutive] ( identifier[keys] )):
identifier[parent_object] [ identifier[parent_object_key] ]=[]
keyword[for] identifier[key_index] , identifier[key] keyword[in] identifier[enumerate] ( identifier[keys] ):
identifier[parent_object] [ identifier[parent_object_key] ]. identifier[append] ( identifier[object_] [ identifier[str] ( identifier[key] )])
identifier[_convert_dict_to_list] ( identifier[parent_object] [ identifier[parent_object_key] ][- literal[int] ],
identifier[parent_object] [ identifier[parent_object_key] ],
identifier[key_index] )
identifier[_convert_dict_to_list] ( identifier[unflattened_dict] , keyword[None] , keyword[None] )
keyword[return] identifier[unflattened_dict]
|
def unflatten_list(flat_dict, separator='_'):
"""
    Unflattens a dictionary, first assuming no lists exist, and then tries to
    identify lists and replace them
This is probably not very efficient and has not been tested extensively
Feel free to add test cases or rewrite the logic
Issues that stand out to me:
    - Sorting all the keys in the dictionary, which especially for the root
    dictionary can be a lot of keys
- Checking that numbers are consecutive is O(N) in number of keys
:param flat_dict: dictionary with no hierarchy
:param separator: a string that separates keys
:return: a dictionary with hierarchy
"""
_unflatten_asserts(flat_dict, separator)
# First unflatten the dictionary assuming no lists exist
unflattened_dict = unflatten(flat_dict, separator)
def _convert_dict_to_list(object_, parent_object, parent_object_key):
if isinstance(object_, dict):
for key in object_:
if isinstance(object_[key], dict):
_convert_dict_to_list(object_[key], object_, key) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
try:
keys = [int(key) for key in object_]
keys.sort() # depends on [control=['try'], data=[]]
except (ValueError, TypeError):
keys = [] # depends on [control=['except'], data=[]]
keys_len = len(keys)
if keys_len > 0 and sum(keys) == int((keys_len - 1) * keys_len / 2) and (keys[0] == 0) and (keys[-1] == keys_len - 1) and check_if_numbers_are_consecutive(keys):
# The dictionary looks like a list so we're going to replace it
parent_object[parent_object_key] = []
for (key_index, key) in enumerate(keys):
parent_object[parent_object_key].append(object_[str(key)])
# The list item we just added might be a list itself
# https://github.com/amirziai/flatten/issues/15
_convert_dict_to_list(parent_object[parent_object_key][-1], parent_object[parent_object_key], key_index) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
_convert_dict_to_list(unflattened_dict, None, None)
return unflattened_dict
|
def location(self):
"""
The location for this engine. May be None if no specific
location has been assigned.
:param value: location to assign engine. Can be name, str href,
or Location element. If name, it will be automatically created
if a Location with the same name doesn't exist.
:raises UpdateElementFailed: failure to update element
:return: Location element or None
"""
location = Element.from_href(self.location_ref)
if location and location.name == 'Default':
return None
return location
|
def function[location, parameter[self]]:
constant[
The location for this engine. May be None if no specific
location has been assigned.
:param value: location to assign engine. Can be name, str href,
or Location element. If name, it will be automatically created
if a Location with the same name doesn't exist.
:raises UpdateElementFailed: failure to update element
:return: Location element or None
]
variable[location] assign[=] call[name[Element].from_href, parameter[name[self].location_ref]]
if <ast.BoolOp object at 0x7da1b1a2b6d0> begin[:]
return[constant[None]]
return[name[location]]
|
keyword[def] identifier[location] ( identifier[self] ):
literal[string]
identifier[location] = identifier[Element] . identifier[from_href] ( identifier[self] . identifier[location_ref] )
keyword[if] identifier[location] keyword[and] identifier[location] . identifier[name] == literal[string] :
keyword[return] keyword[None]
keyword[return] identifier[location]
|
def location(self):
"""
The location for this engine. May be None if no specific
location has been assigned.
:param value: location to assign engine. Can be name, str href,
or Location element. If name, it will be automatically created
if a Location with the same name doesn't exist.
:raises UpdateElementFailed: failure to update element
:return: Location element or None
"""
location = Element.from_href(self.location_ref)
if location and location.name == 'Default':
return None # depends on [control=['if'], data=[]]
return location
|
def breathe_identifier(self):
"""
The unique identifier for breathe directives.
.. note::
This method is currently assumed to only be called for nodes that are
in :data:`exhale.utils.LEAF_LIKE_KINDS` (see also
:func:`exhale.graph.ExhaleRoot.generateSingleNodeRST` where it is used).
**Return**
:class:`python:str`
Usually, this will just be ``self.name``. However, for functions in
particular the signature must be included to distinguish overloads.
"""
if self.kind == "function":
# TODO: breathe bug with templates and overloads, don't know what to do...
return "{name}({parameters})".format(
name=self.name,
parameters=", ".join(self.parameters)
)
return self.name
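
A minimal sketch of what the method above produces, using a stand-in node object; the kind, name, and parameter strings are invented for illustration.

# Stand-in node that reuses the function above as a method (values are made up):
class _FakeNode:
    kind = "function"
    name = "exhale::arbitraryFunction"
    parameters = ["int x", "bool verbose"]
    breathe_identifier = breathe_identifier

_FakeNode().breathe_identifier()  # -> "exhale::arbitraryFunction(int x, bool verbose)"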
|
def function[breathe_identifier, parameter[self]]:
constant[
The unique identifier for breathe directives.
.. note::
This method is currently assumed to only be called for nodes that are
in :data:`exhale.utils.LEAF_LIKE_KINDS` (see also
:func:`exhale.graph.ExhaleRoot.generateSingleNodeRST` where it is used).
**Return**
:class:`python:str`
Usually, this will just be ``self.name``. However, for functions in
particular the signature must be included to distinguish overloads.
]
if compare[name[self].kind equal[==] constant[function]] begin[:]
return[call[constant[{name}({parameters})].format, parameter[]]]
return[name[self].name]
|
keyword[def] identifier[breathe_identifier] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[kind] == literal[string] :
keyword[return] literal[string] . identifier[format] (
identifier[name] = identifier[self] . identifier[name] ,
identifier[parameters] = literal[string] . identifier[join] ( identifier[self] . identifier[parameters] )
)
keyword[return] identifier[self] . identifier[name]
|
def breathe_identifier(self):
"""
The unique identifier for breathe directives.
.. note::
This method is currently assumed to only be called for nodes that are
in :data:`exhale.utils.LEAF_LIKE_KINDS` (see also
:func:`exhale.graph.ExhaleRoot.generateSingleNodeRST` where it is used).
**Return**
:class:`python:str`
Usually, this will just be ``self.name``. However, for functions in
particular the signature must be included to distinguish overloads.
"""
if self.kind == 'function':
# TODO: breathe bug with templates and overloads, don't know what to do...
return '{name}({parameters})'.format(name=self.name, parameters=', '.join(self.parameters)) # depends on [control=['if'], data=[]]
return self.name
|
def clean(self):
""" Remove intermediate files created.
"""
#TODO: add cleaning of mask files, *if* created ...
for f in self.catalog_names:
if 'match' in f:
if os.path.exists(self.catalog_names[f]):
log.info('Deleting intermediate match file: %s'%
self.catalog_names[f])
os.remove(self.catalog_names[f])
else:
for extn in f:
if os.path.exists(extn):
log.info('Deleting intermediate catalog: %d'%extn)
os.remove(extn)
|
def function[clean, parameter[self]]:
constant[ Remove intermediate files created.
]
for taget[name[f]] in starred[name[self].catalog_names] begin[:]
if compare[constant[match] in name[f]] begin[:]
if call[name[os].path.exists, parameter[call[name[self].catalog_names][name[f]]]] begin[:]
call[name[log].info, parameter[binary_operation[constant[Deleting intermediate match file: %s] <ast.Mod object at 0x7da2590d6920> call[name[self].catalog_names][name[f]]]]]
call[name[os].remove, parameter[call[name[self].catalog_names][name[f]]]]
|
keyword[def] identifier[clean] ( identifier[self] ):
literal[string]
keyword[for] identifier[f] keyword[in] identifier[self] . identifier[catalog_names] :
keyword[if] literal[string] keyword[in] identifier[f] :
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[self] . identifier[catalog_names] [ identifier[f] ]):
identifier[log] . identifier[info] ( literal[string] %
identifier[self] . identifier[catalog_names] [ identifier[f] ])
identifier[os] . identifier[remove] ( identifier[self] . identifier[catalog_names] [ identifier[f] ])
keyword[else] :
keyword[for] identifier[extn] keyword[in] identifier[f] :
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[extn] ):
identifier[log] . identifier[info] ( literal[string] % identifier[extn] )
identifier[os] . identifier[remove] ( identifier[extn] )
|
def clean(self):
""" Remove intermediate files created.
"""
#TODO: add cleaning of mask files, *if* created ...
for f in self.catalog_names:
if 'match' in f:
if os.path.exists(self.catalog_names[f]):
log.info('Deleting intermediate match file: %s' % self.catalog_names[f])
os.remove(self.catalog_names[f]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['f']]
else:
for extn in f:
if os.path.exists(extn):
log.info('Deleting intermediate catalog: %d' % extn)
os.remove(extn) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['extn']] # depends on [control=['for'], data=['f']]
|
def interpolate(requestContext, seriesList, limit=INF):
"""
Takes one metric or a wildcard seriesList, and optionally a limit to the
number of 'None' values to skip over. Continues the line with the last
received value when gaps ('None' values) appear in your data, rather than
breaking your line.
Example::
&target=interpolate(Server01.connections.handled)
&target=interpolate(Server01.connections.handled, 10)
"""
for series in seriesList:
series.name = "interpolate(%s)" % (series.name)
series.pathExpression = series.name
consecutiveNones = 0
for i, value in enumerate(series):
series[i] = value
# No 'keeping' can be done on the first value because we have no
# idea what came before it.
if i == 0:
continue
if value is None:
consecutiveNones += 1
elif consecutiveNones == 0:
# Have a value but no need to interpolate
continue
elif series[i - consecutiveNones - 1] is None:
# Have a value but can't interpolate: reset count
consecutiveNones = 0
continue
else:
# Have a value and can interpolate. If a non-None value is
                # seen before the limit of Nones is hit, backfill all the
                # missing datapoints by interpolating between the last known
                # value and the current one.
if consecutiveNones > 0 and consecutiveNones <= limit:
lastIndex = i - consecutiveNones - 1
lastValue = series[lastIndex]
for index in range(i - consecutiveNones, i):
                        # Linearly interpolate between the last known value and
                        # the current one across the gap of Nones.
                        nextValue = lastValue + (index - lastIndex) * \
                            (value - lastValue) / (consecutiveNones + 1)
                        series[index] = nextValue
consecutiveNones = 0
return seriesList
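
A tiny arithmetic check of the linear backfill performed above, with made-up numbers; it applies the per-index formula directly rather than constructing a real graphite series.

# Gap of two Nones between 1.0 (index 0) and 4.0 (index 3), so consecutiveNones == 2:
lastValue, value, lastIndex, consecutiveNones = 1.0, 4.0, 0, 2
filled = [lastValue + (index - lastIndex) * (value - lastValue) / (consecutiveNones + 1)
          for index in range(1, 3)]
# filled == [2.0, 3.0], i.e. the series becomes [1.0, 2.0, 3.0, 4.0]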
|
def function[interpolate, parameter[requestContext, seriesList, limit]]:
constant[
Takes one metric or a wildcard seriesList, and optionally a limit to the
number of 'None' values to skip over. Continues the line with the last
received value when gaps ('None' values) appear in your data, rather than
breaking your line.
Example::
&target=interpolate(Server01.connections.handled)
&target=interpolate(Server01.connections.handled, 10)
]
for taget[name[series]] in starred[name[seriesList]] begin[:]
name[series].name assign[=] binary_operation[constant[interpolate(%s)] <ast.Mod object at 0x7da2590d6920> name[series].name]
name[series].pathExpression assign[=] name[series].name
variable[consecutiveNones] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da20e793370>, <ast.Name object at 0x7da1b06d17b0>]]] in starred[call[name[enumerate], parameter[name[series]]]] begin[:]
call[name[series]][name[i]] assign[=] name[value]
if compare[name[i] equal[==] constant[0]] begin[:]
continue
if compare[name[value] is constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b2347d60>
return[name[seriesList]]
|
keyword[def] identifier[interpolate] ( identifier[requestContext] , identifier[seriesList] , identifier[limit] = identifier[INF] ):
literal[string]
keyword[for] identifier[series] keyword[in] identifier[seriesList] :
identifier[series] . identifier[name] = literal[string] %( identifier[series] . identifier[name] )
identifier[series] . identifier[pathExpression] = identifier[series] . identifier[name]
identifier[consecutiveNones] = literal[int]
keyword[for] identifier[i] , identifier[value] keyword[in] identifier[enumerate] ( identifier[series] ):
identifier[series] [ identifier[i] ]= identifier[value]
keyword[if] identifier[i] == literal[int] :
keyword[continue]
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[consecutiveNones] += literal[int]
keyword[elif] identifier[consecutiveNones] == literal[int] :
keyword[continue]
keyword[elif] identifier[series] [ identifier[i] - identifier[consecutiveNones] - literal[int] ] keyword[is] keyword[None] :
identifier[consecutiveNones] = literal[int]
keyword[continue]
keyword[else] :
keyword[if] identifier[consecutiveNones] > literal[int] keyword[and] identifier[consecutiveNones] <= identifier[limit] :
identifier[lastIndex] = identifier[i] - identifier[consecutiveNones] - literal[int]
identifier[lastValue] = identifier[series] [ identifier[lastIndex] ]
keyword[for] identifier[index] keyword[in] identifier[range] ( identifier[i] - identifier[consecutiveNones] , identifier[i] ):
identifier[nextValue] = identifier[lastValue] +( identifier[index] - identifier[lastIndex] )
identifier[nextValue] = identifier[nextValue] *( identifier[value] - identifier[lastValue] )
identifier[nextValue] = identifier[nextValue] /( identifier[consecutiveNones] + literal[int] )
identifier[series] [ identifier[index] ]= identifier[nextValue]
identifier[consecutiveNones] = literal[int]
keyword[return] identifier[seriesList]
|
def interpolate(requestContext, seriesList, limit=INF):
"""
Takes one metric or a wildcard seriesList, and optionally a limit to the
number of 'None' values to skip over. Continues the line with the last
received value when gaps ('None' values) appear in your data, rather than
breaking your line.
Example::
&target=interpolate(Server01.connections.handled)
&target=interpolate(Server01.connections.handled, 10)
"""
for series in seriesList:
series.name = 'interpolate(%s)' % series.name
series.pathExpression = series.name
consecutiveNones = 0
for (i, value) in enumerate(series):
series[i] = value
# No 'keeping' can be done on the first value because we have no
# idea what came before it.
if i == 0:
continue # depends on [control=['if'], data=[]]
if value is None:
consecutiveNones += 1 # depends on [control=['if'], data=[]]
elif consecutiveNones == 0:
# Have a value but no need to interpolate
continue # depends on [control=['if'], data=[]]
elif series[i - consecutiveNones - 1] is None:
# Have a value but can't interpolate: reset count
consecutiveNones = 0
continue # depends on [control=['if'], data=[]]
else:
# Have a value and can interpolate. If a non-None value is
                # seen before the limit of Nones is hit, backfill all the
                # missing datapoints by interpolating between the last known
                # value and the current one.
if consecutiveNones > 0 and consecutiveNones <= limit:
lastIndex = i - consecutiveNones - 1
lastValue = series[lastIndex]
for index in range(i - consecutiveNones, i):
                        nextValue = lastValue + (index - lastIndex) * (value - lastValue) / (consecutiveNones + 1)
                        series[index] = nextValue # depends on [control=['for'], data=['index']] # depends on [control=['if'], data=[]]
consecutiveNones = 0 # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['series']]
return seriesList
|
def handle_protein_results(permutation_result):
"""Takes in output from multiprocess_permutation function and converts to
a better formatted dataframe.
Parameters
----------
permutation_result : list
output from multiprocess_permutation
Returns
-------
permutation_df : pd.DataFrame
formatted output suitable to save
"""
mycols = ['gene', 'num recurrent', 'normalized graph-smoothed position entropy',
'normalized graph-smoothed position entropy p-value',
'Total Mutations', 'Unmapped to Ref Tx']
permutation_df = pd.DataFrame(permutation_result, columns=mycols)
    # get Benjamini-Hochberg adjusted p-values
permutation_df['normalized graph-smoothed position entropy BH q-value'] = mypval.bh_fdr(permutation_df['normalized graph-smoothed position entropy p-value'])
# order output
permutation_df = permutation_df.set_index('gene', drop=False) # make sure genes are indices
col_order = ['gene', 'Total Mutations', 'Unmapped to Ref Tx',
'num recurrent',
'normalized graph-smoothed position entropy',
'normalized graph-smoothed position entropy p-value',
'normalized graph-smoothed position entropy BH q-value']
permutation_df = permutation_df.sort_values(by=['normalized graph-smoothed position entropy p-value'])
return permutation_df[col_order]
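
A hedged sketch of the expected input and output shape, assuming each permutation_result entry is one tuple per gene in the `mycols` order above; the values are fabricated for illustration.

# Assumed input layout (one tuple per gene, matching `mycols`; values invented):
# row = ('GENE1', 12, 0.42, 1e-4, 250, 3)
# df = handle_protein_results([row])
# df has the `col_order` columns, indexed by gene, sorted by the entropy p-value,
# with the BH q-value column appended.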
|
def function[handle_protein_results, parameter[permutation_result]]:
constant[Takes in output from multiprocess_permutation function and converts to
a better formatted dataframe.
Parameters
----------
permutation_result : list
output from multiprocess_permutation
Returns
-------
permutation_df : pd.DataFrame
formatted output suitable to save
]
variable[mycols] assign[=] list[[<ast.Constant object at 0x7da18f8102b0>, <ast.Constant object at 0x7da18f810d30>, <ast.Constant object at 0x7da18f810be0>, <ast.Constant object at 0x7da18f812800>, <ast.Constant object at 0x7da18f811090>, <ast.Constant object at 0x7da18f811b70>]]
variable[permutation_df] assign[=] call[name[pd].DataFrame, parameter[name[permutation_result]]]
call[name[permutation_df]][constant[normalized graph-smoothed position entropy BH q-value]] assign[=] call[name[mypval].bh_fdr, parameter[call[name[permutation_df]][constant[normalized graph-smoothed position entropy p-value]]]]
variable[permutation_df] assign[=] call[name[permutation_df].set_index, parameter[constant[gene]]]
variable[col_order] assign[=] list[[<ast.Constant object at 0x7da18f810dc0>, <ast.Constant object at 0x7da18f811810>, <ast.Constant object at 0x7da18f812740>, <ast.Constant object at 0x7da18f810e80>, <ast.Constant object at 0x7da18f8125c0>, <ast.Constant object at 0x7da18f810580>, <ast.Constant object at 0x7da18f810850>]]
variable[permutation_df] assign[=] call[name[permutation_df].sort_values, parameter[]]
return[call[name[permutation_df]][name[col_order]]]
|
keyword[def] identifier[handle_protein_results] ( identifier[permutation_result] ):
literal[string]
identifier[mycols] =[ literal[string] , literal[string] , literal[string] ,
literal[string] ,
literal[string] , literal[string] ]
identifier[permutation_df] = identifier[pd] . identifier[DataFrame] ( identifier[permutation_result] , identifier[columns] = identifier[mycols] )
identifier[permutation_df] [ literal[string] ]= identifier[mypval] . identifier[bh_fdr] ( identifier[permutation_df] [ literal[string] ])
identifier[permutation_df] = identifier[permutation_df] . identifier[set_index] ( literal[string] , identifier[drop] = keyword[False] )
identifier[col_order] =[ literal[string] , literal[string] , literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ]
identifier[permutation_df] = identifier[permutation_df] . identifier[sort_values] ( identifier[by] =[ literal[string] ])
keyword[return] identifier[permutation_df] [ identifier[col_order] ]
|
def handle_protein_results(permutation_result):
"""Takes in output from multiprocess_permutation function and converts to
a better formatted dataframe.
Parameters
----------
permutation_result : list
output from multiprocess_permutation
Returns
-------
permutation_df : pd.DataFrame
formatted output suitable to save
"""
mycols = ['gene', 'num recurrent', 'normalized graph-smoothed position entropy', 'normalized graph-smoothed position entropy p-value', 'Total Mutations', 'Unmapped to Ref Tx']
permutation_df = pd.DataFrame(permutation_result, columns=mycols)
    # get Benjamini-Hochberg adjusted p-values
permutation_df['normalized graph-smoothed position entropy BH q-value'] = mypval.bh_fdr(permutation_df['normalized graph-smoothed position entropy p-value'])
# order output
permutation_df = permutation_df.set_index('gene', drop=False) # make sure genes are indices
col_order = ['gene', 'Total Mutations', 'Unmapped to Ref Tx', 'num recurrent', 'normalized graph-smoothed position entropy', 'normalized graph-smoothed position entropy p-value', 'normalized graph-smoothed position entropy BH q-value']
permutation_df = permutation_df.sort_values(by=['normalized graph-smoothed position entropy p-value'])
return permutation_df[col_order]
|
def print_build_info(self):
"""Prints the include and library path being used for debugging purposes."""
if self.static_extension:
build_type = "static extension"
else:
build_type = "dynamic extension"
print("Build type: %s" % build_type)
print("Include path: %s" % " ".join(self.include_dirs))
print("Library path: %s" % " ".join(self.library_dirs))
print("Linked dynamic libraries: %s" % " ".join(self.libraries))
print("Linked static libraries: %s" % " ".join(self.extra_objects))
print("Extra compiler options: %s" % " ".join(self.extra_compile_args))
print("Extra linker options: %s" % " ".join(self.extra_link_args))
|
def function[print_build_info, parameter[self]]:
constant[Prints the include and library path being used for debugging purposes.]
if name[self].static_extension begin[:]
variable[build_type] assign[=] constant[static extension]
call[name[print], parameter[binary_operation[constant[Build type: %s] <ast.Mod object at 0x7da2590d6920> name[build_type]]]]
call[name[print], parameter[binary_operation[constant[Include path: %s] <ast.Mod object at 0x7da2590d6920> call[constant[ ].join, parameter[name[self].include_dirs]]]]]
call[name[print], parameter[binary_operation[constant[Library path: %s] <ast.Mod object at 0x7da2590d6920> call[constant[ ].join, parameter[name[self].library_dirs]]]]]
call[name[print], parameter[binary_operation[constant[Linked dynamic libraries: %s] <ast.Mod object at 0x7da2590d6920> call[constant[ ].join, parameter[name[self].libraries]]]]]
call[name[print], parameter[binary_operation[constant[Linked static libraries: %s] <ast.Mod object at 0x7da2590d6920> call[constant[ ].join, parameter[name[self].extra_objects]]]]]
call[name[print], parameter[binary_operation[constant[Extra compiler options: %s] <ast.Mod object at 0x7da2590d6920> call[constant[ ].join, parameter[name[self].extra_compile_args]]]]]
call[name[print], parameter[binary_operation[constant[Extra linker options: %s] <ast.Mod object at 0x7da2590d6920> call[constant[ ].join, parameter[name[self].extra_link_args]]]]]
|
keyword[def] identifier[print_build_info] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[static_extension] :
identifier[build_type] = literal[string]
keyword[else] :
identifier[build_type] = literal[string]
identifier[print] ( literal[string] % identifier[build_type] )
identifier[print] ( literal[string] % literal[string] . identifier[join] ( identifier[self] . identifier[include_dirs] ))
identifier[print] ( literal[string] % literal[string] . identifier[join] ( identifier[self] . identifier[library_dirs] ))
identifier[print] ( literal[string] % literal[string] . identifier[join] ( identifier[self] . identifier[libraries] ))
identifier[print] ( literal[string] % literal[string] . identifier[join] ( identifier[self] . identifier[extra_objects] ))
identifier[print] ( literal[string] % literal[string] . identifier[join] ( identifier[self] . identifier[extra_compile_args] ))
identifier[print] ( literal[string] % literal[string] . identifier[join] ( identifier[self] . identifier[extra_link_args] ))
|
def print_build_info(self):
"""Prints the include and library path being used for debugging purposes."""
if self.static_extension:
build_type = 'static extension' # depends on [control=['if'], data=[]]
else:
build_type = 'dynamic extension'
print('Build type: %s' % build_type)
print('Include path: %s' % ' '.join(self.include_dirs))
print('Library path: %s' % ' '.join(self.library_dirs))
print('Linked dynamic libraries: %s' % ' '.join(self.libraries))
print('Linked static libraries: %s' % ' '.join(self.extra_objects))
print('Extra compiler options: %s' % ' '.join(self.extra_compile_args))
print('Extra linker options: %s' % ' '.join(self.extra_link_args))
|
def new(localfile, jottapath, JFS):
"""Upload a new file from local disk (doesn't exist on JottaCloud).
Returns JottaFile object"""
with open(localfile) as lf:
_new = JFS.up(jottapath, lf)
return _new
|
def function[new, parameter[localfile, jottapath, JFS]]:
constant[Upload a new file from local disk (doesn't exist on JottaCloud).
Returns JottaFile object]
with call[name[open], parameter[name[localfile]]] begin[:]
variable[_new] assign[=] call[name[JFS].up, parameter[name[jottapath], name[lf]]]
return[name[_new]]
|
keyword[def] identifier[new] ( identifier[localfile] , identifier[jottapath] , identifier[JFS] ):
literal[string]
keyword[with] identifier[open] ( identifier[localfile] ) keyword[as] identifier[lf] :
identifier[_new] = identifier[JFS] . identifier[up] ( identifier[jottapath] , identifier[lf] )
keyword[return] identifier[_new]
|
def new(localfile, jottapath, JFS):
"""Upload a new file from local disk (doesn't exist on JottaCloud).
Returns JottaFile object"""
with open(localfile) as lf:
_new = JFS.up(jottapath, lf) # depends on [control=['with'], data=['lf']]
return _new
|
def draw(self,N=1.5):
"""compute every node coordinates after converging to optimal ordering by N
rounds, and finally perform the edge routing.
"""
while N>0.5:
for (l,mvmt) in self.ordering_step():
pass
N = N-1
if N>0:
for (l,mvmt) in self.ordering_step(oneway=True):
pass
self.setxy()
self.draw_edges()
|
def function[draw, parameter[self, N]]:
constant[compute every node coordinates after converging to optimal ordering by N
rounds, and finally perform the edge routing.
]
while compare[name[N] greater[>] constant[0.5]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0f05810>, <ast.Name object at 0x7da1b107b9d0>]]] in starred[call[name[self].ordering_step, parameter[]]] begin[:]
pass
variable[N] assign[=] binary_operation[name[N] - constant[1]]
if compare[name[N] greater[>] constant[0]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1044fa0>, <ast.Name object at 0x7da1b1045810>]]] in starred[call[name[self].ordering_step, parameter[]]] begin[:]
pass
call[name[self].setxy, parameter[]]
call[name[self].draw_edges, parameter[]]
|
keyword[def] identifier[draw] ( identifier[self] , identifier[N] = literal[int] ):
literal[string]
keyword[while] identifier[N] > literal[int] :
keyword[for] ( identifier[l] , identifier[mvmt] ) keyword[in] identifier[self] . identifier[ordering_step] ():
keyword[pass]
identifier[N] = identifier[N] - literal[int]
keyword[if] identifier[N] > literal[int] :
keyword[for] ( identifier[l] , identifier[mvmt] ) keyword[in] identifier[self] . identifier[ordering_step] ( identifier[oneway] = keyword[True] ):
keyword[pass]
identifier[self] . identifier[setxy] ()
identifier[self] . identifier[draw_edges] ()
|
def draw(self, N=1.5):
"""compute every node coordinates after converging to optimal ordering by N
rounds, and finally perform the edge routing.
"""
while N > 0.5:
for (l, mvmt) in self.ordering_step():
pass # depends on [control=['for'], data=[]]
N = N - 1 # depends on [control=['while'], data=['N']]
if N > 0:
for (l, mvmt) in self.ordering_step(oneway=True):
pass # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
self.setxy()
self.draw_edges()
|
def delete(self, ids):
"""
Method to delete vlan's by their ids
:param ids: Identifiers of vlan's
:return: None
"""
url = build_uri_with_ids('api/v3/vlan/%s/', ids)
return super(ApiVlan, self).delete(url)
|
def function[delete, parameter[self, ids]]:
constant[
Method to delete vlan's by their ids
:param ids: Identifiers of vlan's
:return: None
]
variable[url] assign[=] call[name[build_uri_with_ids], parameter[constant[api/v3/vlan/%s/], name[ids]]]
return[call[call[name[super], parameter[name[ApiVlan], name[self]]].delete, parameter[name[url]]]]
|
keyword[def] identifier[delete] ( identifier[self] , identifier[ids] ):
literal[string]
identifier[url] = identifier[build_uri_with_ids] ( literal[string] , identifier[ids] )
keyword[return] identifier[super] ( identifier[ApiVlan] , identifier[self] ). identifier[delete] ( identifier[url] )
|
def delete(self, ids):
"""
Method to delete vlan's by their ids
:param ids: Identifiers of vlan's
:return: None
"""
url = build_uri_with_ids('api/v3/vlan/%s/', ids)
return super(ApiVlan, self).delete(url)
|
def load_request_data(request_schema, partial=False):
"""
Load request data as JSON using the given schema.
    Forces JSON decoding even if the client does not specify the `Content-Type` header properly.
    This is friendlier to clients and test software, even at the cost of not distinguishing
HTTP 400 and 415 errors.
"""
try:
json_data = request.get_json(force=True) or {}
except Exception:
        # if `simplejson` is installed, simplejson.scanner.JSONDecodeError will be raised
        # on malformed JSON, whereas the built-in `json` returns None
json_data = {}
request_data = request_schema.load(json_data, partial=partial)
if request_data.errors:
# pass the validation errors back in the context
raise with_context(
UnprocessableEntity("Validation error"), [{
"message": "Could not validate field: {}".format(field),
"field": field,
"reasons": reasons
} for field, reasons in request_data.errors.items()],
)
return request_data.data
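
A hypothetical call site for the helper above, assuming a marshmallow 2.x schema (whose load() returns a result with .data and .errors) and an active Flask request context; the schema and field names are invented.

# Hypothetical usage inside a Flask view (schema and field names are made up):
# from marshmallow import Schema, fields
#
# class GreetingSchema(Schema):
#     name = fields.String(required=True)
#
# payload = load_request_data(GreetingSchema())  # raises UnprocessableEntity on bad input
# payload['name']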
|
def function[load_request_data, parameter[request_schema, partial]]:
constant[
Load request data as JSON using the given schema.
Forces JSON decoding even if the client not specify the `Content-Type` header properly.
This is friendlier to client and test software, even at the cost of not distinguishing
HTTP 400 and 415 errors.
]
<ast.Try object at 0x7da1b0c20490>
variable[request_data] assign[=] call[name[request_schema].load, parameter[name[json_data]]]
if name[request_data].errors begin[:]
<ast.Raise object at 0x7da1b0c20dc0>
return[name[request_data].data]
|
keyword[def] identifier[load_request_data] ( identifier[request_schema] , identifier[partial] = keyword[False] ):
literal[string]
keyword[try] :
identifier[json_data] = identifier[request] . identifier[get_json] ( identifier[force] = keyword[True] ) keyword[or] {}
keyword[except] identifier[Exception] :
identifier[json_data] ={}
identifier[request_data] = identifier[request_schema] . identifier[load] ( identifier[json_data] , identifier[partial] = identifier[partial] )
keyword[if] identifier[request_data] . identifier[errors] :
keyword[raise] identifier[with_context] (
identifier[UnprocessableEntity] ( literal[string] ),[{
literal[string] : literal[string] . identifier[format] ( identifier[field] ),
literal[string] : identifier[field] ,
literal[string] : identifier[reasons]
} keyword[for] identifier[field] , identifier[reasons] keyword[in] identifier[request_data] . identifier[errors] . identifier[items] ()],
)
keyword[return] identifier[request_data] . identifier[data]
|
def load_request_data(request_schema, partial=False):
"""
Load request data as JSON using the given schema.
    Forces JSON decoding even if the client does not specify the `Content-Type` header properly.
    This is friendlier to clients and test software, even at the cost of not distinguishing
HTTP 400 and 415 errors.
"""
try:
json_data = request.get_json(force=True) or {} # depends on [control=['try'], data=[]]
except Exception:
        # if `simplejson` is installed, simplejson.scanner.JSONDecodeError will be raised
        # on malformed JSON, whereas the built-in `json` returns None
json_data = {} # depends on [control=['except'], data=[]]
request_data = request_schema.load(json_data, partial=partial)
if request_data.errors:
# pass the validation errors back in the context
raise with_context(UnprocessableEntity('Validation error'), [{'message': 'Could not validate field: {}'.format(field), 'field': field, 'reasons': reasons} for (field, reasons) in request_data.errors.items()]) # depends on [control=['if'], data=[]]
return request_data.data
|
def _process_assignments(self, anexec, contents, mode="insert"):
"""Extracts all variable assignments from the body of the executable.
:arg mode: for real-time update; either 'insert', 'delete' or 'replace'.
"""
for assign in self.RE_ASSIGN.finditer(contents):
assignee = assign.group("assignee").strip()
target = re.split(r"[(%\s]", assignee)[0].lower()
#We only want to include variables that we know are in the scope of the
#current executable. This excludes function calls etc. with optional params.
if target in self._intrinsic:
continue
if target in anexec.members or \
target in anexec.parameters or \
(isinstance(anexec, Function) and target.lower() == anexec.name.lower()):
if mode == "insert":
anexec.add_assignment(re.split(r"[(\s]", assignee)[0])
elif mode == "delete":
#Remove the first instance of this assignment from the list
try:
                        index = anexec.assignments.index(assign)
                        del anexec.assignments[index]
                    except ValueError:
                        #We didn't have anything to remove; nothing more to do.
pass
|
def function[_process_assignments, parameter[self, anexec, contents, mode]]:
constant[Extracts all variable assignments from the body of the executable.
:arg mode: for real-time update; either 'insert', 'delete' or 'replace'.
]
for taget[name[assign]] in starred[call[name[self].RE_ASSIGN.finditer, parameter[name[contents]]]] begin[:]
variable[assignee] assign[=] call[call[name[assign].group, parameter[constant[assignee]]].strip, parameter[]]
variable[target] assign[=] call[call[call[name[re].split, parameter[constant[[(%\s]], name[assignee]]]][constant[0]].lower, parameter[]]
if compare[name[target] in name[self]._intrinsic] begin[:]
continue
if <ast.BoolOp object at 0x7da1b26acbb0> begin[:]
if compare[name[mode] equal[==] constant[insert]] begin[:]
call[name[anexec].add_assignment, parameter[call[call[name[re].split, parameter[constant[[(\s]], name[assignee]]]][constant[0]]]]
|
keyword[def] identifier[_process_assignments] ( identifier[self] , identifier[anexec] , identifier[contents] , identifier[mode] = literal[string] ):
literal[string]
keyword[for] identifier[assign] keyword[in] identifier[self] . identifier[RE_ASSIGN] . identifier[finditer] ( identifier[contents] ):
identifier[assignee] = identifier[assign] . identifier[group] ( literal[string] ). identifier[strip] ()
identifier[target] = identifier[re] . identifier[split] ( literal[string] , identifier[assignee] )[ literal[int] ]. identifier[lower] ()
keyword[if] identifier[target] keyword[in] identifier[self] . identifier[_intrinsic] :
keyword[continue]
keyword[if] identifier[target] keyword[in] identifier[anexec] . identifier[members] keyword[or] identifier[target] keyword[in] identifier[anexec] . identifier[parameters] keyword[or] ( identifier[isinstance] ( identifier[anexec] , identifier[Function] ) keyword[and] identifier[target] . identifier[lower] ()== identifier[anexec] . identifier[name] . identifier[lower] ()):
keyword[if] identifier[mode] == literal[string] :
identifier[anexec] . identifier[add_assignment] ( identifier[re] . identifier[split] ( literal[string] , identifier[assignee] )[ literal[int] ])
keyword[elif] identifier[mode] == literal[string] :
keyword[try] :
identifier[index] = identifier[element] . identifier[assignments] . identifier[index] ( identifier[assign] )
keyword[del] identifier[element] . identifier[assignments] [ identifier[index] ]
keyword[except] identifier[ValueError] :
keyword[pass]
|
def _process_assignments(self, anexec, contents, mode='insert'):
"""Extracts all variable assignments from the body of the executable.
:arg mode: for real-time update; either 'insert', 'delete' or 'replace'.
"""
for assign in self.RE_ASSIGN.finditer(contents):
assignee = assign.group('assignee').strip()
target = re.split('[(%\\s]', assignee)[0].lower()
#We only want to include variables that we know are in the scope of the
#current executable. This excludes function calls etc. with optional params.
if target in self._intrinsic:
continue # depends on [control=['if'], data=[]]
if target in anexec.members or target in anexec.parameters or (isinstance(anexec, Function) and target.lower() == anexec.name.lower()):
if mode == 'insert':
anexec.add_assignment(re.split('[(\\s]', assignee)[0]) # depends on [control=['if'], data=[]]
elif mode == 'delete':
#Remove the first instance of this assignment from the list
try:
                    index = anexec.assignments.index(assign)
                    del anexec.assignments[index] # depends on [control=['try'], data=[]]
                except ValueError:
                    #We didn't have anything to remove; nothing more to do.
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['assign']]
|
def _take_screenshot(self):
"""Take a screenshot, also called by Mixin
Args:
- filename(string): file name to save
Returns:
PIL Image object
"""
raw_png = self._wda.screenshot()
img = Image.open(BytesIO(raw_png))
return img
|
def function[_take_screenshot, parameter[self]]:
constant[Take a screenshot, also called by Mixin
Args:
- filename(string): file name to save
Returns:
PIL Image object
]
variable[raw_png] assign[=] call[name[self]._wda.screenshot, parameter[]]
variable[img] assign[=] call[name[Image].open, parameter[call[name[BytesIO], parameter[name[raw_png]]]]]
return[name[img]]
|
keyword[def] identifier[_take_screenshot] ( identifier[self] ):
literal[string]
identifier[raw_png] = identifier[self] . identifier[_wda] . identifier[screenshot] ()
identifier[img] = identifier[Image] . identifier[open] ( identifier[BytesIO] ( identifier[raw_png] ))
keyword[return] identifier[img]
|
def _take_screenshot(self):
"""Take a screenshot, also called by Mixin
Args:
- filename(string): file name to save
Returns:
PIL Image object
"""
raw_png = self._wda.screenshot()
img = Image.open(BytesIO(raw_png))
return img
|
def _set_remote_mep(self, v, load=False):
"""
Setter method for remote_mep, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mep/cfm_mep_sub_commands/remote_mep (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_remote_mep is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_remote_mep() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("remote_mep",remote_mep.remote_mep, yang_name="remote-mep", rest_name="remote-mep", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='remote-mep', extensions={u'tailf-common': {u'callpoint': u'setDot1agRemoteMep', u'cli-suppress-list-no': None, u'cli-suppress-mode': None, u'info': u'Set Remote Mep'}}), is_container='list', yang_name="remote-mep", rest_name="remote-mep", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'setDot1agRemoteMep', u'cli-suppress-list-no': None, u'cli-suppress-mode': None, u'info': u'Set Remote Mep'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """remote_mep must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("remote_mep",remote_mep.remote_mep, yang_name="remote-mep", rest_name="remote-mep", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='remote-mep', extensions={u'tailf-common': {u'callpoint': u'setDot1agRemoteMep', u'cli-suppress-list-no': None, u'cli-suppress-mode': None, u'info': u'Set Remote Mep'}}), is_container='list', yang_name="remote-mep", rest_name="remote-mep", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'setDot1agRemoteMep', u'cli-suppress-list-no': None, u'cli-suppress-mode': None, u'info': u'Set Remote Mep'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='list', is_config=True)""",
})
self.__remote_mep = t
if hasattr(self, '_set'):
self._set()
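
# Hedged usage sketch (not part of the original source): in pyangbind-style
# generated bindings this setter normally backs a `remote_mep` property on the
# generated container class, so assigning an incompatible value is expected to
# raise the ValueError constructed above. The instance name `mep` is hypothetical.
try:
    mep.remote_mep = 42  # not list-compatible -> routed through _set_remote_mep()
except ValueError as err:
    print(err.args[0]["error-string"])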
|
def function[_set_remote_mep, parameter[self, v, load]]:
constant[
Setter method for remote_mep, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mep/cfm_mep_sub_commands/remote_mep (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_remote_mep is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_remote_mep() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18f8104f0>
name[self].__remote_mep assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]]
|
keyword[def] identifier[_set_remote_mep] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[remote_mep] . identifier[remote_mep] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__remote_mep] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] ()
|
def _set_remote_mep(self, v, load=False):
"""
Setter method for remote_mep, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mep/cfm_mep_sub_commands/remote_mep (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_remote_mep is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_remote_mep() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=YANGListType('remote_mep', remote_mep.remote_mep, yang_name='remote-mep', rest_name='remote-mep', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='remote-mep', extensions={u'tailf-common': {u'callpoint': u'setDot1agRemoteMep', u'cli-suppress-list-no': None, u'cli-suppress-mode': None, u'info': u'Set Remote Mep'}}), is_container='list', yang_name='remote-mep', rest_name='remote-mep', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'setDot1agRemoteMep', u'cli-suppress-list-no': None, u'cli-suppress-mode': None, u'info': u'Set Remote Mep'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='list', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'remote_mep must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("remote_mep",remote_mep.remote_mep, yang_name="remote-mep", rest_name="remote-mep", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'remote-mep\', extensions={u\'tailf-common\': {u\'callpoint\': u\'setDot1agRemoteMep\', u\'cli-suppress-list-no\': None, u\'cli-suppress-mode\': None, u\'info\': u\'Set Remote Mep\'}}), is_container=\'list\', yang_name="remote-mep", rest_name="remote-mep", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'callpoint\': u\'setDot1agRemoteMep\', u\'cli-suppress-list-no\': None, u\'cli-suppress-mode\': None, u\'info\': u\'Set Remote Mep\'}}, namespace=\'urn:brocade.com:mgmt:brocade-dot1ag\', defining_module=\'brocade-dot1ag\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__remote_mep = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]]
|
def visit_html(self, node):
"""
Generate html elements and schematic json
"""
parentClsNode = node.parent.parent
assert parentClsNode.attributes['objtype'] == 'class'
assert parentClsNode.attributes['domain'] == 'py'
sign = node.parent.parent.children[0]
assert isinstance(sign, desc_signature)
absolute_name = sign.attributes['ids'][0]
_construct = node["constructor_fn "]
serialno = node["serialno"]
try:
if _construct is None:
unitCls = generic_import(absolute_name)
if not issubclass(unitCls, Unit):
raise AssertionError(
"Can not use hwt-schematic sphinx directive and create schematic"
" for %s because it is not subclass of %r" % (absolute_name, Unit))
u = unitCls()
else:
assert len(_construct) > 0 and RE_IS_ID.match(_construct), _construct
_absolute_name = []
assert ".." not in absolute_name, absolute_name
for n in absolute_name.split(sep=".")[:-1]:
if n != "":
_absolute_name.append(n)
_absolute_name.append(_construct)
constructor_fn = generic_import(_absolute_name)
u = constructor_fn()
if not isinstance(u, Unit):
raise AssertionError(
"Can not use hwt-schematic sphinx directive and create schematic"
" for %s because function did not returned instance of %r, (%r)" % (
_absolute_name, Unit, u))
schem_file = SchematicPaths.get_sch_file_name_absolute(
self.document, absolute_name, serialno)
makedirs(path.dirname(schem_file), exist_ok=True)
with open(schem_file, "w") as f:
synthesised(u, DEFAULT_PLATFORM)
g = UnitToLNode(u, optimizations=DEFAULT_LAYOUT_OPTIMIZATIONS)
idStore = ElkIdStore()
data = g.toElkJson(idStore)
json.dump(data, f)
viewer = SchematicPaths.get_sch_viewer_link(self.document)
sch_name = SchematicPaths.get_sch_file_name(
self.document, absolute_name, serialno)
ref = nodes.reference(text=_("schematic"), # internal=False,
refuri="%s?schematic=%s" % (
viewer,
path.join(SchematicPaths.SCHEMATIC_DIR_PREFIX,
sch_name)))
node += ref
except Exception as e:
logging.error(e, exc_info=True)
raise Exception(
"Error occured while processing of %s" % absolute_name)
|
def function[visit_html, parameter[self, node]]:
constant[
Generate html elements and schematic json
]
variable[parentClsNode] assign[=] name[node].parent.parent
assert[compare[call[name[parentClsNode].attributes][constant[objtype]] equal[==] constant[class]]]
assert[compare[call[name[parentClsNode].attributes][constant[domain]] equal[==] constant[py]]]
variable[sign] assign[=] call[name[node].parent.parent.children][constant[0]]
assert[call[name[isinstance], parameter[name[sign], name[desc_signature]]]]
variable[absolute_name] assign[=] call[call[name[sign].attributes][constant[ids]]][constant[0]]
variable[_construct] assign[=] call[name[node]][constant[constructor_fn ]]
variable[serialno] assign[=] call[name[node]][constant[serialno]]
<ast.Try object at 0x7da18dc9b6a0>
|
keyword[def] identifier[visit_html] ( identifier[self] , identifier[node] ):
literal[string]
identifier[parentClsNode] = identifier[node] . identifier[parent] . identifier[parent]
keyword[assert] identifier[parentClsNode] . identifier[attributes] [ literal[string] ]== literal[string]
keyword[assert] identifier[parentClsNode] . identifier[attributes] [ literal[string] ]== literal[string]
identifier[sign] = identifier[node] . identifier[parent] . identifier[parent] . identifier[children] [ literal[int] ]
keyword[assert] identifier[isinstance] ( identifier[sign] , identifier[desc_signature] )
identifier[absolute_name] = identifier[sign] . identifier[attributes] [ literal[string] ][ literal[int] ]
identifier[_construct] = identifier[node] [ literal[string] ]
identifier[serialno] = identifier[node] [ literal[string] ]
keyword[try] :
keyword[if] identifier[_construct] keyword[is] keyword[None] :
identifier[unitCls] = identifier[generic_import] ( identifier[absolute_name] )
keyword[if] keyword[not] identifier[issubclass] ( identifier[unitCls] , identifier[Unit] ):
keyword[raise] identifier[AssertionError] (
literal[string]
literal[string] %( identifier[absolute_name] , identifier[Unit] ))
identifier[u] = identifier[unitCls] ()
keyword[else] :
keyword[assert] identifier[len] ( identifier[_construct] )> literal[int] keyword[and] identifier[RE_IS_ID] . identifier[match] ( identifier[_construct] ), identifier[_construct]
identifier[_absolute_name] =[]
keyword[assert] literal[string] keyword[not] keyword[in] identifier[absolute_name] , identifier[absolute_name]
keyword[for] identifier[n] keyword[in] identifier[absolute_name] . identifier[split] ( identifier[sep] = literal[string] )[:- literal[int] ]:
keyword[if] identifier[n] != literal[string] :
identifier[_absolute_name] . identifier[append] ( identifier[n] )
identifier[_absolute_name] . identifier[append] ( identifier[_construct] )
identifier[constructor_fn] = identifier[generic_import] ( identifier[_absolute_name] )
identifier[u] = identifier[constructor_fn] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[u] , identifier[Unit] ):
keyword[raise] identifier[AssertionError] (
literal[string]
literal[string] %(
identifier[_absolute_name] , identifier[Unit] , identifier[u] ))
identifier[schem_file] = identifier[SchematicPaths] . identifier[get_sch_file_name_absolute] (
identifier[self] . identifier[document] , identifier[absolute_name] , identifier[serialno] )
identifier[makedirs] ( identifier[path] . identifier[dirname] ( identifier[schem_file] ), identifier[exist_ok] = keyword[True] )
keyword[with] identifier[open] ( identifier[schem_file] , literal[string] ) keyword[as] identifier[f] :
identifier[synthesised] ( identifier[u] , identifier[DEFAULT_PLATFORM] )
identifier[g] = identifier[UnitToLNode] ( identifier[u] , identifier[optimizations] = identifier[DEFAULT_LAYOUT_OPTIMIZATIONS] )
identifier[idStore] = identifier[ElkIdStore] ()
identifier[data] = identifier[g] . identifier[toElkJson] ( identifier[idStore] )
identifier[json] . identifier[dump] ( identifier[data] , identifier[f] )
identifier[viewer] = identifier[SchematicPaths] . identifier[get_sch_viewer_link] ( identifier[self] . identifier[document] )
identifier[sch_name] = identifier[SchematicPaths] . identifier[get_sch_file_name] (
identifier[self] . identifier[document] , identifier[absolute_name] , identifier[serialno] )
identifier[ref] = identifier[nodes] . identifier[reference] ( identifier[text] = identifier[_] ( literal[string] ),
identifier[refuri] = literal[string] %(
identifier[viewer] ,
identifier[path] . identifier[join] ( identifier[SchematicPaths] . identifier[SCHEMATIC_DIR_PREFIX] ,
identifier[sch_name] )))
identifier[node] += identifier[ref]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logging] . identifier[error] ( identifier[e] , identifier[exc_info] = keyword[True] )
keyword[raise] identifier[Exception] (
literal[string] % identifier[absolute_name] )
|
def visit_html(self, node):
"""
Generate html elements and schematic json
"""
parentClsNode = node.parent.parent
assert parentClsNode.attributes['objtype'] == 'class'
assert parentClsNode.attributes['domain'] == 'py'
sign = node.parent.parent.children[0]
assert isinstance(sign, desc_signature)
absolute_name = sign.attributes['ids'][0]
_construct = node['constructor_fn ']
serialno = node['serialno']
try:
if _construct is None:
unitCls = generic_import(absolute_name)
if not issubclass(unitCls, Unit):
raise AssertionError('Can not use hwt-schematic sphinx directive and create schematic for %s because it is not subclass of %r' % (absolute_name, Unit)) # depends on [control=['if'], data=[]]
u = unitCls() # depends on [control=['if'], data=[]]
else:
assert len(_construct) > 0 and RE_IS_ID.match(_construct), _construct
_absolute_name = []
assert '..' not in absolute_name, absolute_name
for n in absolute_name.split(sep='.')[:-1]:
if n != '':
_absolute_name.append(n) # depends on [control=['if'], data=['n']] # depends on [control=['for'], data=['n']]
_absolute_name.append(_construct)
constructor_fn = generic_import(_absolute_name)
u = constructor_fn()
if not isinstance(u, Unit):
                raise AssertionError('Can not use hwt-schematic sphinx directive and create schematic for %s because function did not return an instance of %r, (%r)' % (_absolute_name, Unit, u)) # depends on [control=['if'], data=[]]
schem_file = SchematicPaths.get_sch_file_name_absolute(self.document, absolute_name, serialno)
makedirs(path.dirname(schem_file), exist_ok=True)
with open(schem_file, 'w') as f:
synthesised(u, DEFAULT_PLATFORM)
g = UnitToLNode(u, optimizations=DEFAULT_LAYOUT_OPTIMIZATIONS)
idStore = ElkIdStore()
data = g.toElkJson(idStore)
json.dump(data, f) # depends on [control=['with'], data=['f']]
viewer = SchematicPaths.get_sch_viewer_link(self.document)
sch_name = SchematicPaths.get_sch_file_name(self.document, absolute_name, serialno) # internal=False,
ref = nodes.reference(text=_('schematic'), refuri='%s?schematic=%s' % (viewer, path.join(SchematicPaths.SCHEMATIC_DIR_PREFIX, sch_name)))
node += ref # depends on [control=['try'], data=[]]
except Exception as e:
logging.error(e, exc_info=True)
        raise Exception('Error occurred while processing %s' % absolute_name) # depends on [control=['except'], data=['e']]
|
def register_service(self, **kwargs):
"""
register this service with consul
kwargs passed to Consul.agent.service.register
"""
kwargs.setdefault('name', self.app.name)
self.session.agent.service.register(**kwargs)
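
# Hedged usage sketch (not part of the original source). It assumes `svc`
# carries this mixin with `self.app.name` set and `self.session` being a
# python-consul Consul() client; extra keyword arguments are forwarded as-is.
svc.register_service(port=8080, tags=["http"])    # name defaults to the app name
svc.register_service(name="worker", port=9000)    # an explicit name wins over the default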
|
def function[register_service, parameter[self]]:
constant[
register this service with consul
kwargs passed to Consul.agent.service.register
]
call[name[kwargs].setdefault, parameter[constant[name], name[self].app.name]]
call[name[self].session.agent.service.register, parameter[]]
|
keyword[def] identifier[register_service] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] . identifier[setdefault] ( literal[string] , identifier[self] . identifier[app] . identifier[name] )
identifier[self] . identifier[session] . identifier[agent] . identifier[service] . identifier[register] (** identifier[kwargs] )
|
def register_service(self, **kwargs):
"""
register this service with consul
kwargs passed to Consul.agent.service.register
"""
kwargs.setdefault('name', self.app.name)
self.session.agent.service.register(**kwargs)
|
def wc_lha2dict(lha):
"""Convert a dictionary returned by pylha from a DSixTools WC input file
into a dictionary of Wilson coefficients."""
C = OrderedDict()
# try to read all WCs with 0, 2, or 4 fermions; if not found, set to zero
for k, (block, i) in WC_dict_0f.items():
try:
C[k] = dict(lha['BLOCK'][block]['values'])[i]
except KeyError:
C[k] = 0
for k in definitions.WC_keys_2f:
try:
C[k] = lha2matrix(lha['BLOCK']['WC' + k.upper()]['values'], (3,3)).real
except KeyError:
C[k] = np.zeros((3,3))
try: # try to add imaginary part
C[k] = C[k] + 1j*lha2matrix(lha['BLOCK']['IMWC' + k.upper()]['values'], (3,3))
except KeyError:
pass
for k in definitions.WC_keys_4f:
try:
C[k] = lha2matrix(lha['BLOCK']['WC' + k.upper()]['values'], (3,3,3,3))
except KeyError:
C[k] = np.zeros((3,3,3,3))
try: # try to add imaginary part
C[k] = C[k] + 1j*lha2matrix(lha['BLOCK']['IMWC' + k.upper()]['values'], (3,3,3,3))
except KeyError:
pass
return C
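
# Illustrative sketch (not part of the original source): a minimal pylha-style
# input dict. Block names and index conventions below are hypothetical
# placeholders; any block the loops above do not find simply falls back to zero.
lha = {"BLOCK": {
    "WCG": {"values": [[1, 1.2e-3]]},       # scalar (0-fermion) coefficient at index 1
    "WCUG": {"values": [[1, 1, 5.0e-4]]},   # (1,1) entry of a real 3x3 block
}}
C = wc_lha2dict(lha)                         # unmatched keys come back as zeros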
|
def function[wc_lha2dict, parameter[lha]]:
constant[Convert a dictionary returned by pylha from a DSixTools WC input file
into a dictionary of Wilson coefficients.]
variable[C] assign[=] call[name[OrderedDict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18ede4c70>, <ast.Tuple object at 0x7da18ede7d90>]]] in starred[call[name[WC_dict_0f].items, parameter[]]] begin[:]
<ast.Try object at 0x7da18ede72b0>
for taget[name[k]] in starred[name[definitions].WC_keys_2f] begin[:]
<ast.Try object at 0x7da18ede4ac0>
<ast.Try object at 0x7da18ede7490>
for taget[name[k]] in starred[name[definitions].WC_keys_4f] begin[:]
<ast.Try object at 0x7da18ede6740>
<ast.Try object at 0x7da2054a4e50>
return[name[C]]
|
keyword[def] identifier[wc_lha2dict] ( identifier[lha] ):
literal[string]
identifier[C] = identifier[OrderedDict] ()
keyword[for] identifier[k] ,( identifier[block] , identifier[i] ) keyword[in] identifier[WC_dict_0f] . identifier[items] ():
keyword[try] :
identifier[C] [ identifier[k] ]= identifier[dict] ( identifier[lha] [ literal[string] ][ identifier[block] ][ literal[string] ])[ identifier[i] ]
keyword[except] identifier[KeyError] :
identifier[C] [ identifier[k] ]= literal[int]
keyword[for] identifier[k] keyword[in] identifier[definitions] . identifier[WC_keys_2f] :
keyword[try] :
identifier[C] [ identifier[k] ]= identifier[lha2matrix] ( identifier[lha] [ literal[string] ][ literal[string] + identifier[k] . identifier[upper] ()][ literal[string] ],( literal[int] , literal[int] )). identifier[real]
keyword[except] identifier[KeyError] :
identifier[C] [ identifier[k] ]= identifier[np] . identifier[zeros] (( literal[int] , literal[int] ))
keyword[try] :
identifier[C] [ identifier[k] ]= identifier[C] [ identifier[k] ]+ literal[int] * identifier[lha2matrix] ( identifier[lha] [ literal[string] ][ literal[string] + identifier[k] . identifier[upper] ()][ literal[string] ],( literal[int] , literal[int] ))
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[for] identifier[k] keyword[in] identifier[definitions] . identifier[WC_keys_4f] :
keyword[try] :
identifier[C] [ identifier[k] ]= identifier[lha2matrix] ( identifier[lha] [ literal[string] ][ literal[string] + identifier[k] . identifier[upper] ()][ literal[string] ],( literal[int] , literal[int] , literal[int] , literal[int] ))
keyword[except] identifier[KeyError] :
identifier[C] [ identifier[k] ]= identifier[np] . identifier[zeros] (( literal[int] , literal[int] , literal[int] , literal[int] ))
keyword[try] :
identifier[C] [ identifier[k] ]= identifier[C] [ identifier[k] ]+ literal[int] * identifier[lha2matrix] ( identifier[lha] [ literal[string] ][ literal[string] + identifier[k] . identifier[upper] ()][ literal[string] ],( literal[int] , literal[int] , literal[int] , literal[int] ))
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[return] identifier[C]
|
def wc_lha2dict(lha):
"""Convert a dictionary returned by pylha from a DSixTools WC input file
into a dictionary of Wilson coefficients."""
C = OrderedDict()
# try to read all WCs with 0, 2, or 4 fermions; if not found, set to zero
for (k, (block, i)) in WC_dict_0f.items():
try:
C[k] = dict(lha['BLOCK'][block]['values'])[i] # depends on [control=['try'], data=[]]
except KeyError:
C[k] = 0 # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
for k in definitions.WC_keys_2f:
try:
C[k] = lha2matrix(lha['BLOCK']['WC' + k.upper()]['values'], (3, 3)).real # depends on [control=['try'], data=[]]
except KeyError:
C[k] = np.zeros((3, 3)) # depends on [control=['except'], data=[]]
try: # try to add imaginary part
C[k] = C[k] + 1j * lha2matrix(lha['BLOCK']['IMWC' + k.upper()]['values'], (3, 3)) # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['k']]
for k in definitions.WC_keys_4f:
try:
C[k] = lha2matrix(lha['BLOCK']['WC' + k.upper()]['values'], (3, 3, 3, 3)) # depends on [control=['try'], data=[]]
except KeyError:
C[k] = np.zeros((3, 3, 3, 3)) # depends on [control=['except'], data=[]]
try: # try to add imaginary part
C[k] = C[k] + 1j * lha2matrix(lha['BLOCK']['IMWC' + k.upper()]['values'], (3, 3, 3, 3)) # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['k']]
return C
|
def set_division(self, division):
"""
Select the "current" division that we'll be working on/with.
"""
try:
division = int(division)
except (TypeError, ValueError):
raise V1DivisionError('Supplied division %r is not a number' %
(division,))
urlbase = 'v1/%d/' % (division,)
resource = urljoin(
urlbase,
"crm/Accounts?$select=ID&$filter=Name+eq+'DOES_NOT_EXIST'")
try:
self.rest(GET(resource))
except AssertionError:
raise V1DivisionError('Invalid division %r according to server' %
(division,))
self.storage.set_division(division)
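
# Hedged usage sketch (not part of the original source); `api` stands for the
# client object providing the rest()/storage members used above.
api.set_division(102)       # probes v1/102/... on the server before persisting
# api.set_division("abc")   # -> V1DivisionError: supplied division is not a number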
|
def function[set_division, parameter[self, division]]:
constant[
Select the "current" division that we'll be working on/with.
]
<ast.Try object at 0x7da1b039ace0>
variable[urlbase] assign[=] binary_operation[constant[v1/%d/] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b05c6140>]]]
variable[resource] assign[=] call[name[urljoin], parameter[name[urlbase], constant[crm/Accounts?$select=ID&$filter=Name+eq+'DOES_NOT_EXIST']]]
<ast.Try object at 0x7da1b05c4fd0>
call[name[self].storage.set_division, parameter[name[division]]]
|
keyword[def] identifier[set_division] ( identifier[self] , identifier[division] ):
literal[string]
keyword[try] :
identifier[division] = identifier[int] ( identifier[division] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[V1DivisionError] ( literal[string] %
( identifier[division] ,))
identifier[urlbase] = literal[string] %( identifier[division] ,)
identifier[resource] = identifier[urljoin] (
identifier[urlbase] ,
literal[string] )
keyword[try] :
identifier[self] . identifier[rest] ( identifier[GET] ( identifier[resource] ))
keyword[except] identifier[AssertionError] :
keyword[raise] identifier[V1DivisionError] ( literal[string] %
( identifier[division] ,))
identifier[self] . identifier[storage] . identifier[set_division] ( identifier[division] )
|
def set_division(self, division):
"""
Select the "current" division that we'll be working on/with.
"""
try:
division = int(division) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise V1DivisionError('Supplied division %r is not a number' % (division,)) # depends on [control=['except'], data=[]]
urlbase = 'v1/%d/' % (division,)
resource = urljoin(urlbase, "crm/Accounts?$select=ID&$filter=Name+eq+'DOES_NOT_EXIST'")
try:
self.rest(GET(resource)) # depends on [control=['try'], data=[]]
except AssertionError:
raise V1DivisionError('Invalid division %r according to server' % (division,)) # depends on [control=['except'], data=[]]
self.storage.set_division(division)
|
def update_group_dampening(self, group_id, dampening_id, dampening):
"""
Update an existing group dampening
:param group_id: Group Trigger id attached to dampening
:param dampening_id: id of the dampening to be updated
:return: Group Dampening created
"""
data = self._serialize_object(dampening)
url = self._service_url(['triggers', 'groups', group_id, 'dampenings', dampening_id])
return Dampening(self._put(url, data))
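
# Hedged usage sketch (not part of the original source). `client` is assumed to
# expose this method; the payload is shown as a plain dict on the assumption
# that _serialize_object accepts dicts as well as Dampening objects, and the
# field names are placeholders rather than confirmed Hawkular Alerts API names.
updated = client.update_group_dampening(
    "my-group-trigger", "my-dampening-id",
    {"triggerMode": "FIRING", "type": "STRICT", "evalTrueSetting": 3})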
|
def function[update_group_dampening, parameter[self, group_id, dampening_id, dampening]]:
constant[
Update an existing group dampening
:param group_id: Group Trigger id attached to dampening
:param dampening_id: id of the dampening to be updated
:return: Group Dampening created
]
variable[data] assign[=] call[name[self]._serialize_object, parameter[name[dampening]]]
variable[url] assign[=] call[name[self]._service_url, parameter[list[[<ast.Constant object at 0x7da2054a4370>, <ast.Constant object at 0x7da2054a4c10>, <ast.Name object at 0x7da2054a4490>, <ast.Constant object at 0x7da2054a47f0>, <ast.Name object at 0x7da2054a47c0>]]]]
return[call[name[Dampening], parameter[call[name[self]._put, parameter[name[url], name[data]]]]]]
|
keyword[def] identifier[update_group_dampening] ( identifier[self] , identifier[group_id] , identifier[dampening_id] , identifier[dampening] ):
literal[string]
identifier[data] = identifier[self] . identifier[_serialize_object] ( identifier[dampening] )
identifier[url] = identifier[self] . identifier[_service_url] ([ literal[string] , literal[string] , identifier[group_id] , literal[string] , identifier[dampening_id] ])
keyword[return] identifier[Dampening] ( identifier[self] . identifier[_put] ( identifier[url] , identifier[data] ))
|
def update_group_dampening(self, group_id, dampening_id, dampening):
"""
Update an existing group dampening
:param group_id: Group Trigger id attached to dampening
:param dampening_id: id of the dampening to be updated
:return: Group Dampening created
"""
data = self._serialize_object(dampening)
url = self._service_url(['triggers', 'groups', group_id, 'dampenings', dampening_id])
return Dampening(self._put(url, data))
|
def nl_socket_modify_cb(sk, type_, kind, func, arg):
"""Modify the callback handler associated with the socket.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/socket.c#L633
Sets specific callback functions in the existing nl_cb class instance stored in the nl_sock socket.
Positional arguments:
sk -- Netlink socket (nl_sock class instance).
type_ -- which type callback to set (integer).
kind -- kind of callback (integer).
func -- callback function.
arg -- argument to be passed to callback function.
Returns:
0 on success or a negative error code.
"""
return int(nl_cb_set(sk.s_cb, type_, kind, func, arg))
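
# Illustrative sketch (not part of the original source), following the usual
# libnl callback pattern; the import location of the constants is assumed from
# the library's handlers module, and `sk` is assumed to be an allocated nl_sock.
from libnl.handlers import NL_CB_VALID, NL_CB_CUSTOM, NL_OK

def store_msg(msg, arg):
    arg.append(msg)   # stash every valid message for later inspection
    return NL_OK

received = []
nl_socket_modify_cb(sk, NL_CB_VALID, NL_CB_CUSTOM, store_msg, received)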
|
def function[nl_socket_modify_cb, parameter[sk, type_, kind, func, arg]]:
constant[Modify the callback handler associated with the socket.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/socket.c#L633
Sets specific callback functions in the existing nl_cb class instance stored in the nl_sock socket.
Positional arguments:
sk -- Netlink socket (nl_sock class instance).
type_ -- which type callback to set (integer).
kind -- kind of callback (integer).
func -- callback function.
arg -- argument to be passed to callback function.
Returns:
0 on success or a negative error code.
]
return[call[name[int], parameter[call[name[nl_cb_set], parameter[name[sk].s_cb, name[type_], name[kind], name[func], name[arg]]]]]]
|
keyword[def] identifier[nl_socket_modify_cb] ( identifier[sk] , identifier[type_] , identifier[kind] , identifier[func] , identifier[arg] ):
literal[string]
keyword[return] identifier[int] ( identifier[nl_cb_set] ( identifier[sk] . identifier[s_cb] , identifier[type_] , identifier[kind] , identifier[func] , identifier[arg] ))
|
def nl_socket_modify_cb(sk, type_, kind, func, arg):
"""Modify the callback handler associated with the socket.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/socket.c#L633
Sets specific callback functions in the existing nl_cb class instance stored in the nl_sock socket.
Positional arguments:
sk -- Netlink socket (nl_sock class instance).
type_ -- which type callback to set (integer).
kind -- kind of callback (integer).
func -- callback function.
arg -- argument to be passed to callback function.
Returns:
0 on success or a negative error code.
"""
return int(nl_cb_set(sk.s_cb, type_, kind, func, arg))
|
def create(vm_):
r'''
Create a single VM from a data dict.
vm\_
        The dictionary used to create a VM.
Optional vm\_ dict options for overwriting template:
region_id
Optional - OpenNebula Zone ID
memory
Optional - In MB
cpu
Optional - Percent of host CPU to allocate
vcpu
Optional - Amount of vCPUs to allocate
CLI Example:
.. code-block:: bash
salt-cloud -p my-opennebula-profile vm_name
salt-cloud -p my-opennebula-profile vm_name memory=16384 cpu=2.5 vcpu=16
'''
try:
# Check for required profile parameters before sending any API calls.
if vm_['profile'] and config.is_profile_configured(__opts__,
__active_provider_name__ or 'opennebula',
vm_['profile']) is False:
return False
except AttributeError:
pass
__utils__['cloud.fire_event'](
'event',
'starting create',
'salt/cloud/{0}/creating'.format(vm_['name']),
args=__utils__['cloud.filter_event']('creating', vm_, ['name', 'profile', 'provider', 'driver']),
sock_dir=__opts__['sock_dir'],
transport=__opts__['transport']
)
log.info('Creating Cloud VM %s', vm_['name'])
kwargs = {
'name': vm_['name'],
'template_id': get_template(vm_),
'region_id': get_location(vm_),
}
if 'template' in vm_:
kwargs['image_id'] = get_template_id({'name': vm_['template']})
private_networking = config.get_cloud_config_value(
'private_networking', vm_, __opts__, search_global=False, default=None
)
kwargs['private_networking'] = 'true' if private_networking else 'false'
__utils__['cloud.fire_event'](
'event',
'requesting instance',
'salt/cloud/{0}/requesting'.format(vm_['name']),
args={
'kwargs': __utils__['cloud.filter_event']('requesting', kwargs, list(kwargs)),
},
sock_dir=__opts__['sock_dir'],
)
template = []
if kwargs.get('region_id'):
template.append('SCHED_REQUIREMENTS="ID={0}"'.format(kwargs.get('region_id')))
if vm_.get('memory'):
template.append('MEMORY={0}'.format(vm_.get('memory')))
if vm_.get('cpu'):
template.append('CPU={0}'.format(vm_.get('cpu')))
if vm_.get('vcpu'):
template.append('VCPU={0}'.format(vm_.get('vcpu')))
if vm_.get('disk'):
get_disks = vm_.get('disk')
template_name = vm_['image']
for disk in get_disks:
template.append(_get_device_template(disk, get_disks[disk],
template=template_name))
if 'CLONE' not in six.text_type(template):
raise SaltCloudSystemExit(
'Missing an image disk to clone. Must define a clone disk alongside all other disk definitions.'
)
template_args = "\n".join(template)
try:
server, user, password = _get_xml_rpc()
auth = ':'.join([user, password])
cret = server.one.template.instantiate(auth,
int(kwargs['template_id']),
kwargs['name'],
False,
template_args)
if not cret[0]:
log.error(
'Error creating %s on OpenNebula\n\n'
'The following error was returned when trying to '
'instantiate the template: %s',
vm_['name'], cret[1],
# Show the traceback if the debug logging level is enabled
exc_info_on_loglevel=logging.DEBUG
)
return False
except Exception as exc:
log.error(
'Error creating %s on OpenNebula\n\n'
'The following exception was thrown when trying to '
'run the initial deployment: %s',
vm_['name'], exc,
# Show the traceback if the debug logging level is enabled
exc_info_on_loglevel=logging.DEBUG
)
return False
fqdn = vm_.get('fqdn_base')
if fqdn is not None:
fqdn = '{0}.{1}'.format(vm_['name'], fqdn)
def __query_node_data(vm_name):
node_data = show_instance(vm_name, call='action')
if not node_data:
# Trigger an error in the wait_for_ip function
return False
if node_data['state'] == '7':
return False
if node_data['lcm_state'] == '3':
return node_data
try:
data = __utils__['cloud.wait_for_ip'](
__query_node_data,
update_args=(vm_['name'],),
timeout=config.get_cloud_config_value(
'wait_for_ip_timeout', vm_, __opts__, default=10 * 60),
interval=config.get_cloud_config_value(
'wait_for_ip_interval', vm_, __opts__, default=2),
)
except (SaltCloudExecutionTimeout, SaltCloudExecutionFailure) as exc:
try:
# It might be already up, let's destroy it!
destroy(vm_['name'])
except SaltCloudSystemExit:
pass
finally:
raise SaltCloudSystemExit(six.text_type(exc))
key_filename = config.get_cloud_config_value(
'private_key', vm_, __opts__, search_global=False, default=None
)
if key_filename is not None and not os.path.isfile(key_filename):
raise SaltCloudConfigError(
'The defined key_filename \'{0}\' does not exist'.format(
key_filename
)
)
if fqdn:
vm_['ssh_host'] = fqdn
private_ip = '0.0.0.0'
else:
try:
private_ip = data['private_ips'][0]
except KeyError:
try:
private_ip = data['template']['nic']['ip']
except KeyError:
# if IPv6 is used try this as last resort
# OpenNebula does not yet show ULA address here so take global
private_ip = data['template']['nic']['ip6_global']
vm_['ssh_host'] = private_ip
ssh_username = config.get_cloud_config_value(
'ssh_username', vm_, __opts__, default='root'
)
vm_['username'] = ssh_username
vm_['key_filename'] = key_filename
ret = __utils__['cloud.bootstrap'](vm_, __opts__)
ret['id'] = data['id']
ret['image'] = vm_['image']
ret['name'] = vm_['name']
ret['size'] = data['template']['memory']
ret['state'] = data['state']
ret['private_ips'] = private_ip
ret['public_ips'] = []
log.info('Created Cloud VM \'%s\'', vm_['name'])
log.debug(
'\'%s\' VM creation details:\n%s',
vm_['name'], pprint.pformat(data)
)
__utils__['cloud.fire_event'](
'event',
'created instance',
'salt/cloud/{0}/created'.format(vm_['name']),
args=__utils__['cloud.filter_event']('created', vm_, ['name', 'profile', 'provider', 'driver']),
sock_dir=__opts__['sock_dir'],
)
return ret
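
# Illustrative sketch (not part of the original source): how the optional
# overrides from the docstring become OpenNebula template lines in the code
# above. The sample_vm contents are hypothetical values.
sample_vm = {"name": "web01", "memory": 16384, "cpu": 2.5, "vcpu": 16}
lines = []
for key, fmt in (("memory", "MEMORY={0}"), ("cpu", "CPU={0}"), ("vcpu", "VCPU={0}")):
    if sample_vm.get(key):
        lines.append(fmt.format(sample_vm[key]))
print("\n".join(lines))   # MEMORY=16384, CPU=2.5 and VCPU=16 on separate lines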
|
def function[create, parameter[vm_]]:
constant[
Create a single VM from a data dict.
vm\_
        The dictionary used to create a VM.
Optional vm\_ dict options for overwriting template:
region_id
Optional - OpenNebula Zone ID
memory
Optional - In MB
cpu
Optional - Percent of host CPU to allocate
vcpu
Optional - Amount of vCPUs to allocate
CLI Example:
.. code-block:: bash
salt-cloud -p my-opennebula-profile vm_name
salt-cloud -p my-opennebula-profile vm_name memory=16384 cpu=2.5 vcpu=16
]
<ast.Try object at 0x7da204344100>
call[call[name[__utils__]][constant[cloud.fire_event]], parameter[constant[event], constant[starting create], call[constant[salt/cloud/{0}/creating].format, parameter[call[name[vm_]][constant[name]]]]]]
call[name[log].info, parameter[constant[Creating Cloud VM %s], call[name[vm_]][constant[name]]]]
variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da204345ea0>, <ast.Constant object at 0x7da204345ff0>, <ast.Constant object at 0x7da204346fb0>], [<ast.Subscript object at 0x7da204344910>, <ast.Call object at 0x7da204347c70>, <ast.Call object at 0x7da2043464d0>]]
if compare[constant[template] in name[vm_]] begin[:]
call[name[kwargs]][constant[image_id]] assign[=] call[name[get_template_id], parameter[dictionary[[<ast.Constant object at 0x7da204345d80>], [<ast.Subscript object at 0x7da204345420>]]]]
variable[private_networking] assign[=] call[name[config].get_cloud_config_value, parameter[constant[private_networking], name[vm_], name[__opts__]]]
call[name[kwargs]][constant[private_networking]] assign[=] <ast.IfExp object at 0x7da204346440>
call[call[name[__utils__]][constant[cloud.fire_event]], parameter[constant[event], constant[requesting instance], call[constant[salt/cloud/{0}/requesting].format, parameter[call[name[vm_]][constant[name]]]]]]
variable[template] assign[=] list[[]]
if call[name[kwargs].get, parameter[constant[region_id]]] begin[:]
call[name[template].append, parameter[call[constant[SCHED_REQUIREMENTS="ID={0}"].format, parameter[call[name[kwargs].get, parameter[constant[region_id]]]]]]]
if call[name[vm_].get, parameter[constant[memory]]] begin[:]
call[name[template].append, parameter[call[constant[MEMORY={0}].format, parameter[call[name[vm_].get, parameter[constant[memory]]]]]]]
if call[name[vm_].get, parameter[constant[cpu]]] begin[:]
call[name[template].append, parameter[call[constant[CPU={0}].format, parameter[call[name[vm_].get, parameter[constant[cpu]]]]]]]
if call[name[vm_].get, parameter[constant[vcpu]]] begin[:]
call[name[template].append, parameter[call[constant[VCPU={0}].format, parameter[call[name[vm_].get, parameter[constant[vcpu]]]]]]]
if call[name[vm_].get, parameter[constant[disk]]] begin[:]
variable[get_disks] assign[=] call[name[vm_].get, parameter[constant[disk]]]
variable[template_name] assign[=] call[name[vm_]][constant[image]]
for taget[name[disk]] in starred[name[get_disks]] begin[:]
call[name[template].append, parameter[call[name[_get_device_template], parameter[name[disk], call[name[get_disks]][name[disk]]]]]]
if compare[constant[CLONE] <ast.NotIn object at 0x7da2590d7190> call[name[six].text_type, parameter[name[template]]]] begin[:]
<ast.Raise object at 0x7da18f58d540>
variable[template_args] assign[=] call[constant[
].join, parameter[name[template]]]
<ast.Try object at 0x7da18f58d420>
variable[fqdn] assign[=] call[name[vm_].get, parameter[constant[fqdn_base]]]
if compare[name[fqdn] is_not constant[None]] begin[:]
variable[fqdn] assign[=] call[constant[{0}.{1}].format, parameter[call[name[vm_]][constant[name]], name[fqdn]]]
def function[__query_node_data, parameter[vm_name]]:
variable[node_data] assign[=] call[name[show_instance], parameter[name[vm_name]]]
if <ast.UnaryOp object at 0x7da18f58f910> begin[:]
return[constant[False]]
if compare[call[name[node_data]][constant[state]] equal[==] constant[7]] begin[:]
return[constant[False]]
if compare[call[name[node_data]][constant[lcm_state]] equal[==] constant[3]] begin[:]
return[name[node_data]]
<ast.Try object at 0x7da18bc72890>
variable[key_filename] assign[=] call[name[config].get_cloud_config_value, parameter[constant[private_key], name[vm_], name[__opts__]]]
if <ast.BoolOp object at 0x7da18bc70640> begin[:]
<ast.Raise object at 0x7da18bc702b0>
if name[fqdn] begin[:]
call[name[vm_]][constant[ssh_host]] assign[=] name[fqdn]
variable[private_ip] assign[=] constant[0.0.0.0]
variable[ssh_username] assign[=] call[name[config].get_cloud_config_value, parameter[constant[ssh_username], name[vm_], name[__opts__]]]
call[name[vm_]][constant[username]] assign[=] name[ssh_username]
call[name[vm_]][constant[key_filename]] assign[=] name[key_filename]
variable[ret] assign[=] call[call[name[__utils__]][constant[cloud.bootstrap]], parameter[name[vm_], name[__opts__]]]
call[name[ret]][constant[id]] assign[=] call[name[data]][constant[id]]
call[name[ret]][constant[image]] assign[=] call[name[vm_]][constant[image]]
call[name[ret]][constant[name]] assign[=] call[name[vm_]][constant[name]]
call[name[ret]][constant[size]] assign[=] call[call[name[data]][constant[template]]][constant[memory]]
call[name[ret]][constant[state]] assign[=] call[name[data]][constant[state]]
call[name[ret]][constant[private_ips]] assign[=] name[private_ip]
call[name[ret]][constant[public_ips]] assign[=] list[[]]
call[name[log].info, parameter[constant[Created Cloud VM '%s'], call[name[vm_]][constant[name]]]]
call[name[log].debug, parameter[constant['%s' VM creation details:
%s], call[name[vm_]][constant[name]], call[name[pprint].pformat, parameter[name[data]]]]]
call[call[name[__utils__]][constant[cloud.fire_event]], parameter[constant[event], constant[created instance], call[constant[salt/cloud/{0}/created].format, parameter[call[name[vm_]][constant[name]]]]]]
return[name[ret]]
|
keyword[def] identifier[create] ( identifier[vm_] ):
literal[string]
keyword[try] :
keyword[if] identifier[vm_] [ literal[string] ] keyword[and] identifier[config] . identifier[is_profile_configured] ( identifier[__opts__] ,
identifier[__active_provider_name__] keyword[or] literal[string] ,
identifier[vm_] [ literal[string] ]) keyword[is] keyword[False] :
keyword[return] keyword[False]
keyword[except] identifier[AttributeError] :
keyword[pass]
identifier[__utils__] [ literal[string] ](
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[vm_] [ literal[string] ]),
identifier[args] = identifier[__utils__] [ literal[string] ]( literal[string] , identifier[vm_] ,[ literal[string] , literal[string] , literal[string] , literal[string] ]),
identifier[sock_dir] = identifier[__opts__] [ literal[string] ],
identifier[transport] = identifier[__opts__] [ literal[string] ]
)
identifier[log] . identifier[info] ( literal[string] , identifier[vm_] [ literal[string] ])
identifier[kwargs] ={
literal[string] : identifier[vm_] [ literal[string] ],
literal[string] : identifier[get_template] ( identifier[vm_] ),
literal[string] : identifier[get_location] ( identifier[vm_] ),
}
keyword[if] literal[string] keyword[in] identifier[vm_] :
identifier[kwargs] [ literal[string] ]= identifier[get_template_id] ({ literal[string] : identifier[vm_] [ literal[string] ]})
identifier[private_networking] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False] , identifier[default] = keyword[None]
)
identifier[kwargs] [ literal[string] ]= literal[string] keyword[if] identifier[private_networking] keyword[else] literal[string]
identifier[__utils__] [ literal[string] ](
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[vm_] [ literal[string] ]),
identifier[args] ={
literal[string] : identifier[__utils__] [ literal[string] ]( literal[string] , identifier[kwargs] , identifier[list] ( identifier[kwargs] )),
},
identifier[sock_dir] = identifier[__opts__] [ literal[string] ],
)
identifier[template] =[]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
identifier[template] . identifier[append] ( literal[string] . identifier[format] ( identifier[kwargs] . identifier[get] ( literal[string] )))
keyword[if] identifier[vm_] . identifier[get] ( literal[string] ):
identifier[template] . identifier[append] ( literal[string] . identifier[format] ( identifier[vm_] . identifier[get] ( literal[string] )))
keyword[if] identifier[vm_] . identifier[get] ( literal[string] ):
identifier[template] . identifier[append] ( literal[string] . identifier[format] ( identifier[vm_] . identifier[get] ( literal[string] )))
keyword[if] identifier[vm_] . identifier[get] ( literal[string] ):
identifier[template] . identifier[append] ( literal[string] . identifier[format] ( identifier[vm_] . identifier[get] ( literal[string] )))
keyword[if] identifier[vm_] . identifier[get] ( literal[string] ):
identifier[get_disks] = identifier[vm_] . identifier[get] ( literal[string] )
identifier[template_name] = identifier[vm_] [ literal[string] ]
keyword[for] identifier[disk] keyword[in] identifier[get_disks] :
identifier[template] . identifier[append] ( identifier[_get_device_template] ( identifier[disk] , identifier[get_disks] [ identifier[disk] ],
identifier[template] = identifier[template_name] ))
keyword[if] literal[string] keyword[not] keyword[in] identifier[six] . identifier[text_type] ( identifier[template] ):
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
)
identifier[template_args] = literal[string] . identifier[join] ( identifier[template] )
keyword[try] :
identifier[server] , identifier[user] , identifier[password] = identifier[_get_xml_rpc] ()
identifier[auth] = literal[string] . identifier[join] ([ identifier[user] , identifier[password] ])
identifier[cret] = identifier[server] . identifier[one] . identifier[template] . identifier[instantiate] ( identifier[auth] ,
identifier[int] ( identifier[kwargs] [ literal[string] ]),
identifier[kwargs] [ literal[string] ],
keyword[False] ,
identifier[template_args] )
keyword[if] keyword[not] identifier[cret] [ literal[int] ]:
identifier[log] . identifier[error] (
literal[string]
literal[string]
literal[string] ,
identifier[vm_] [ literal[string] ], identifier[cret] [ literal[int] ],
identifier[exc_info_on_loglevel] = identifier[logging] . identifier[DEBUG]
)
keyword[return] keyword[False]
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
identifier[log] . identifier[error] (
literal[string]
literal[string]
literal[string] ,
identifier[vm_] [ literal[string] ], identifier[exc] ,
identifier[exc_info_on_loglevel] = identifier[logging] . identifier[DEBUG]
)
keyword[return] keyword[False]
identifier[fqdn] = identifier[vm_] . identifier[get] ( literal[string] )
keyword[if] identifier[fqdn] keyword[is] keyword[not] keyword[None] :
identifier[fqdn] = literal[string] . identifier[format] ( identifier[vm_] [ literal[string] ], identifier[fqdn] )
keyword[def] identifier[__query_node_data] ( identifier[vm_name] ):
identifier[node_data] = identifier[show_instance] ( identifier[vm_name] , identifier[call] = literal[string] )
keyword[if] keyword[not] identifier[node_data] :
keyword[return] keyword[False]
keyword[if] identifier[node_data] [ literal[string] ]== literal[string] :
keyword[return] keyword[False]
keyword[if] identifier[node_data] [ literal[string] ]== literal[string] :
keyword[return] identifier[node_data]
keyword[try] :
identifier[data] = identifier[__utils__] [ literal[string] ](
identifier[__query_node_data] ,
identifier[update_args] =( identifier[vm_] [ literal[string] ],),
identifier[timeout] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[default] = literal[int] * literal[int] ),
identifier[interval] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[default] = literal[int] ),
)
keyword[except] ( identifier[SaltCloudExecutionTimeout] , identifier[SaltCloudExecutionFailure] ) keyword[as] identifier[exc] :
keyword[try] :
identifier[destroy] ( identifier[vm_] [ literal[string] ])
keyword[except] identifier[SaltCloudSystemExit] :
keyword[pass]
keyword[finally] :
keyword[raise] identifier[SaltCloudSystemExit] ( identifier[six] . identifier[text_type] ( identifier[exc] ))
identifier[key_filename] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False] , identifier[default] = keyword[None]
)
keyword[if] identifier[key_filename] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[key_filename] ):
keyword[raise] identifier[SaltCloudConfigError] (
literal[string] . identifier[format] (
identifier[key_filename]
)
)
keyword[if] identifier[fqdn] :
identifier[vm_] [ literal[string] ]= identifier[fqdn]
identifier[private_ip] = literal[string]
keyword[else] :
keyword[try] :
identifier[private_ip] = identifier[data] [ literal[string] ][ literal[int] ]
keyword[except] identifier[KeyError] :
keyword[try] :
identifier[private_ip] = identifier[data] [ literal[string] ][ literal[string] ][ literal[string] ]
keyword[except] identifier[KeyError] :
identifier[private_ip] = identifier[data] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[vm_] [ literal[string] ]= identifier[private_ip]
identifier[ssh_username] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[default] = literal[string]
)
identifier[vm_] [ literal[string] ]= identifier[ssh_username]
identifier[vm_] [ literal[string] ]= identifier[key_filename]
identifier[ret] = identifier[__utils__] [ literal[string] ]( identifier[vm_] , identifier[__opts__] )
identifier[ret] [ literal[string] ]= identifier[data] [ literal[string] ]
identifier[ret] [ literal[string] ]= identifier[vm_] [ literal[string] ]
identifier[ret] [ literal[string] ]= identifier[vm_] [ literal[string] ]
identifier[ret] [ literal[string] ]= identifier[data] [ literal[string] ][ literal[string] ]
identifier[ret] [ literal[string] ]= identifier[data] [ literal[string] ]
identifier[ret] [ literal[string] ]= identifier[private_ip]
identifier[ret] [ literal[string] ]=[]
identifier[log] . identifier[info] ( literal[string] , identifier[vm_] [ literal[string] ])
identifier[log] . identifier[debug] (
literal[string] ,
identifier[vm_] [ literal[string] ], identifier[pprint] . identifier[pformat] ( identifier[data] )
)
identifier[__utils__] [ literal[string] ](
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[vm_] [ literal[string] ]),
identifier[args] = identifier[__utils__] [ literal[string] ]( literal[string] , identifier[vm_] ,[ literal[string] , literal[string] , literal[string] , literal[string] ]),
identifier[sock_dir] = identifier[__opts__] [ literal[string] ],
)
keyword[return] identifier[ret]
|
def create(vm_):
"""
Create a single VM from a data dict.
vm\\_
        The dictionary used to create a VM.
Optional vm\\_ dict options for overwriting template:
region_id
Optional - OpenNebula Zone ID
memory
Optional - In MB
cpu
Optional - Percent of host CPU to allocate
vcpu
Optional - Amount of vCPUs to allocate
CLI Example:
.. code-block:: bash
salt-cloud -p my-opennebula-profile vm_name
salt-cloud -p my-opennebula-profile vm_name memory=16384 cpu=2.5 vcpu=16
"""
try:
# Check for required profile parameters before sending any API calls.
if vm_['profile'] and config.is_profile_configured(__opts__, __active_provider_name__ or 'opennebula', vm_['profile']) is False:
return False # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
__utils__['cloud.fire_event']('event', 'starting create', 'salt/cloud/{0}/creating'.format(vm_['name']), args=__utils__['cloud.filter_event']('creating', vm_, ['name', 'profile', 'provider', 'driver']), sock_dir=__opts__['sock_dir'], transport=__opts__['transport'])
log.info('Creating Cloud VM %s', vm_['name'])
kwargs = {'name': vm_['name'], 'template_id': get_template(vm_), 'region_id': get_location(vm_)}
if 'template' in vm_:
kwargs['image_id'] = get_template_id({'name': vm_['template']}) # depends on [control=['if'], data=['vm_']]
private_networking = config.get_cloud_config_value('private_networking', vm_, __opts__, search_global=False, default=None)
kwargs['private_networking'] = 'true' if private_networking else 'false'
__utils__['cloud.fire_event']('event', 'requesting instance', 'salt/cloud/{0}/requesting'.format(vm_['name']), args={'kwargs': __utils__['cloud.filter_event']('requesting', kwargs, list(kwargs))}, sock_dir=__opts__['sock_dir'])
template = []
if kwargs.get('region_id'):
template.append('SCHED_REQUIREMENTS="ID={0}"'.format(kwargs.get('region_id'))) # depends on [control=['if'], data=[]]
if vm_.get('memory'):
template.append('MEMORY={0}'.format(vm_.get('memory'))) # depends on [control=['if'], data=[]]
if vm_.get('cpu'):
template.append('CPU={0}'.format(vm_.get('cpu'))) # depends on [control=['if'], data=[]]
if vm_.get('vcpu'):
template.append('VCPU={0}'.format(vm_.get('vcpu'))) # depends on [control=['if'], data=[]]
if vm_.get('disk'):
get_disks = vm_.get('disk')
template_name = vm_['image']
for disk in get_disks:
template.append(_get_device_template(disk, get_disks[disk], template=template_name)) # depends on [control=['for'], data=['disk']]
if 'CLONE' not in six.text_type(template):
raise SaltCloudSystemExit('Missing an image disk to clone. Must define a clone disk alongside all other disk definitions.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
template_args = '\n'.join(template)
try:
(server, user, password) = _get_xml_rpc()
auth = ':'.join([user, password])
cret = server.one.template.instantiate(auth, int(kwargs['template_id']), kwargs['name'], False, template_args)
if not cret[0]:
# Show the traceback if the debug logging level is enabled
log.error('Error creating %s on OpenNebula\n\nThe following error was returned when trying to instantiate the template: %s', vm_['name'], cret[1], exc_info_on_loglevel=logging.DEBUG)
return False # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as exc:
# Show the traceback if the debug logging level is enabled
log.error('Error creating %s on OpenNebula\n\nThe following exception was thrown when trying to run the initial deployment: %s', vm_['name'], exc, exc_info_on_loglevel=logging.DEBUG)
return False # depends on [control=['except'], data=['exc']]
fqdn = vm_.get('fqdn_base')
if fqdn is not None:
fqdn = '{0}.{1}'.format(vm_['name'], fqdn) # depends on [control=['if'], data=['fqdn']]
def __query_node_data(vm_name):
node_data = show_instance(vm_name, call='action')
if not node_data:
# Trigger an error in the wait_for_ip function
return False # depends on [control=['if'], data=[]]
if node_data['state'] == '7':
return False # depends on [control=['if'], data=[]]
if node_data['lcm_state'] == '3':
return node_data # depends on [control=['if'], data=[]]
try:
data = __utils__['cloud.wait_for_ip'](__query_node_data, update_args=(vm_['name'],), timeout=config.get_cloud_config_value('wait_for_ip_timeout', vm_, __opts__, default=10 * 60), interval=config.get_cloud_config_value('wait_for_ip_interval', vm_, __opts__, default=2)) # depends on [control=['try'], data=[]]
except (SaltCloudExecutionTimeout, SaltCloudExecutionFailure) as exc:
try:
# It might be already up, let's destroy it!
destroy(vm_['name']) # depends on [control=['try'], data=[]]
except SaltCloudSystemExit:
pass # depends on [control=['except'], data=[]]
finally:
raise SaltCloudSystemExit(six.text_type(exc)) # depends on [control=['except'], data=['exc']]
key_filename = config.get_cloud_config_value('private_key', vm_, __opts__, search_global=False, default=None)
if key_filename is not None and (not os.path.isfile(key_filename)):
raise SaltCloudConfigError("The defined key_filename '{0}' does not exist".format(key_filename)) # depends on [control=['if'], data=[]]
if fqdn:
vm_['ssh_host'] = fqdn
private_ip = '0.0.0.0' # depends on [control=['if'], data=[]]
else:
try:
private_ip = data['private_ips'][0] # depends on [control=['try'], data=[]]
except KeyError:
try:
private_ip = data['template']['nic']['ip'] # depends on [control=['try'], data=[]]
except KeyError:
# if IPv6 is used try this as last resort
# OpenNebula does not yet show ULA address here so take global
private_ip = data['template']['nic']['ip6_global'] # depends on [control=['except'], data=[]]
vm_['ssh_host'] = private_ip # depends on [control=['except'], data=[]]
ssh_username = config.get_cloud_config_value('ssh_username', vm_, __opts__, default='root')
vm_['username'] = ssh_username
vm_['key_filename'] = key_filename
ret = __utils__['cloud.bootstrap'](vm_, __opts__)
ret['id'] = data['id']
ret['image'] = vm_['image']
ret['name'] = vm_['name']
ret['size'] = data['template']['memory']
ret['state'] = data['state']
ret['private_ips'] = private_ip
ret['public_ips'] = []
log.info("Created Cloud VM '%s'", vm_['name'])
log.debug("'%s' VM creation details:\n%s", vm_['name'], pprint.pformat(data))
__utils__['cloud.fire_event']('event', 'created instance', 'salt/cloud/{0}/created'.format(vm_['name']), args=__utils__['cloud.filter_event']('created', vm_, ['name', 'profile', 'provider', 'driver']), sock_dir=__opts__['sock_dir'])
return ret
|
def mask_and_mean_loss(input_tensor, binary_tensor, axis=None):
"""
Mask a loss by using a tensor filled with 0 or 1 and average correctly.
    :param input_tensor: A float tensor of shape [batch_size, ...] representing the loss/cross_entropy.
    :param binary_tensor: A float tensor of shape [batch_size, ...] representing the mask (1 = keep, 0 = ignore).
    :param axis: The dimensions to reduce. If None (the default), reduces all dimensions.
                 Must be in the range [-rank(input_tensor), rank(input_tensor)).
    :return: A float tensor holding the masked loss averaged over the unmasked entries (reduced along `axis`).
"""
return mean_on_masked(mask_loss(input_tensor, binary_tensor), binary_tensor, axis=axis)
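
A minimal usage sketch (not part of the original snippet): `mask_loss` and `mean_on_masked` are not defined above, so the TensorFlow implementations below are assumptions, included only to make the masking-then-averaging idea concrete.

# Hedged illustration: these helper definitions are assumptions, not the library's own.
import tensorflow as tf

def mask_loss(input_tensor, binary_tensor):
    # Zero the loss wherever the mask is 0.
    return input_tensor * binary_tensor

def mean_on_masked(masked_tensor, binary_tensor, axis=None):
    # Average over the unmasked entries only; the maximum() guards against
    # an all-zero mask causing a division by zero.
    return tf.reduce_sum(masked_tensor, axis=axis) / tf.maximum(
        tf.reduce_sum(binary_tensor, axis=axis), 1.0)

# Example: per-token loss of shape [2, 3]; the second sequence has one padded token.
loss = tf.constant([[0.5, 0.7, 0.2], [0.9, 0.1, 0.4]])
mask = tf.constant([[1.0, 1.0, 1.0], [1.0, 1.0, 0.0]])
per_sequence = mask_and_mean_loss(loss, mask, axis=1)  # approximately [0.467, 0.5]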
|
def function[mask_and_mean_loss, parameter[input_tensor, binary_tensor, axis]]:
constant[
Mask a loss by using a tensor filled with 0 or 1 and average correctly.
:param input_tensor: A float tensor of shape [batch_size, ...] representing the loss/cross_entropy
:param binary_tensor: A float tensor of shape [batch_size, ...] representing the mask.
:return: A float tensor of shape [batch_size, ...] representing the masked loss.
:param axis: The dimensions to reduce. If None (the default), reduces all dimensions.
Must be in the range [-rank(input_tensor), rank(input_tensor)).
]
return[call[name[mean_on_masked], parameter[call[name[mask_loss], parameter[name[input_tensor], name[binary_tensor]]], name[binary_tensor]]]]
|
keyword[def] identifier[mask_and_mean_loss] ( identifier[input_tensor] , identifier[binary_tensor] , identifier[axis] = keyword[None] ):
literal[string]
keyword[return] identifier[mean_on_masked] ( identifier[mask_loss] ( identifier[input_tensor] , identifier[binary_tensor] ), identifier[binary_tensor] , identifier[axis] = identifier[axis] )
|
def mask_and_mean_loss(input_tensor, binary_tensor, axis=None):
"""
Mask a loss by using a tensor filled with 0 or 1 and average correctly.
    :param input_tensor: A float tensor of shape [batch_size, ...] representing the loss/cross_entropy.
    :param binary_tensor: A float tensor of shape [batch_size, ...] representing the mask (1 = keep, 0 = ignore).
    :param axis: The dimensions to reduce. If None (the default), reduces all dimensions.
                 Must be in the range [-rank(input_tensor), rank(input_tensor)).
    :return: A float tensor holding the masked loss averaged over the unmasked entries (reduced along `axis`).
"""
return mean_on_masked(mask_loss(input_tensor, binary_tensor), binary_tensor, axis=axis)
|
def write_json(json_obj, filename, mode="w", print_pretty=True):
'''write_json will (optionally,pretty print) a json object to file
:param json_obj: the dict to print to json
:param filename: the output file to write to
    :param print_pretty: if True, will use nicer formatting
'''
with open(filename, mode) as filey:
if print_pretty:
filey.writelines(
json.dumps(
json_obj,
indent=4,
separators=(
',',
': ')))
else:
filey.writelines(json.dumps(json_obj))
return filename
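
A brief usage sketch (the file names and data are illustrative; the snippet assumes `import json` at module level, as in the original source file):

# Hypothetical example: dump a small dict, once pretty-printed and once compact.
import json  # required by write_json; not shown in the snippet above

record = {"name": "example", "version": 3}
write_json(record, "record.json")                           # 4-space indented output
write_json(record, "record.min.json", print_pretty=False)   # single-line output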
|
def function[write_json, parameter[json_obj, filename, mode, print_pretty]]:
constant[write_json will (optionally,pretty print) a json object to file
:param json_obj: the dict to print to json
:param filename: the output file to write to
:param pretty_print: if True, will use nicer formatting
]
with call[name[open], parameter[name[filename], name[mode]]] begin[:]
if name[print_pretty] begin[:]
call[name[filey].writelines, parameter[call[name[json].dumps, parameter[name[json_obj]]]]]
return[name[filename]]
|
keyword[def] identifier[write_json] ( identifier[json_obj] , identifier[filename] , identifier[mode] = literal[string] , identifier[print_pretty] = keyword[True] ):
literal[string]
keyword[with] identifier[open] ( identifier[filename] , identifier[mode] ) keyword[as] identifier[filey] :
keyword[if] identifier[print_pretty] :
identifier[filey] . identifier[writelines] (
identifier[json] . identifier[dumps] (
identifier[json_obj] ,
identifier[indent] = literal[int] ,
identifier[separators] =(
literal[string] ,
literal[string] )))
keyword[else] :
identifier[filey] . identifier[writelines] ( identifier[json] . identifier[dumps] ( identifier[json_obj] ))
keyword[return] identifier[filename]
|
def write_json(json_obj, filename, mode='w', print_pretty=True):
"""write_json will (optionally,pretty print) a json object to file
:param json_obj: the dict to print to json
:param filename: the output file to write to
    :param print_pretty: if True, will use nicer formatting
"""
with open(filename, mode) as filey:
if print_pretty:
filey.writelines(json.dumps(json_obj, indent=4, separators=(',', ': '))) # depends on [control=['if'], data=[]]
else:
filey.writelines(json.dumps(json_obj)) # depends on [control=['with'], data=['filey']]
return filename
|
def _run_collect_allelic_counts(pos_file, pos_name, work_dir, data):
"""Counts by alleles for a specific sample and set of positions.
"""
out_dir = utils.safe_makedir(os.path.join(dd.get_work_dir(data), "structural", "counts"))
out_file = os.path.join(out_dir, "%s-%s-counts.tsv" % (dd.get_sample_name(data), pos_name))
if not utils.file_exists(out_file):
with file_transaction(data, out_file) as tx_out_file:
params = ["-T", "CollectAllelicCounts", "-L", pos_file, "-I", dd.get_align_bam(data),
"-R", dd.get_ref_file(data), "-O", tx_out_file]
_run_with_memory_scaling(params, tx_out_file, data)
return out_file
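
A hedged usage sketch: the wrapper name and file path below are illustrative, and `data` stands for the bcbio-nextgen per-sample dictionary that the surrounding module passes around.

# Hypothetical wrapper (not from the original module): count allelic depths at
# known heterozygous sites for one sample. The "het-sites" label only affects
# the output name, <sample>-het-sites-counts.tsv, written under
# <work_dir>/structural/counts via GATK4 CollectAllelicCounts.
def collect_het_site_counts(data, het_sites_vcf="het_sites.vcf.gz", work_dir=None):
    return _run_collect_allelic_counts(het_sites_vcf, "het-sites", work_dir, data)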
|
def function[_run_collect_allelic_counts, parameter[pos_file, pos_name, work_dir, data]]:
constant[Counts by alleles for a specific sample and set of positions.
]
variable[out_dir] assign[=] call[name[utils].safe_makedir, parameter[call[name[os].path.join, parameter[call[name[dd].get_work_dir, parameter[name[data]]], constant[structural], constant[counts]]]]]
variable[out_file] assign[=] call[name[os].path.join, parameter[name[out_dir], binary_operation[constant[%s-%s-counts.tsv] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b1790af0>, <ast.Name object at 0x7da1b1791180>]]]]]
if <ast.UnaryOp object at 0x7da1b1791120> begin[:]
with call[name[file_transaction], parameter[name[data], name[out_file]]] begin[:]
variable[params] assign[=] list[[<ast.Constant object at 0x7da1b17901f0>, <ast.Constant object at 0x7da1b1790e50>, <ast.Constant object at 0x7da1b1790dc0>, <ast.Name object at 0x7da1b1790fa0>, <ast.Constant object at 0x7da1b17901c0>, <ast.Call object at 0x7da1b1790730>, <ast.Constant object at 0x7da1b1790910>, <ast.Call object at 0x7da1b1790070>, <ast.Constant object at 0x7da1b1790700>, <ast.Name object at 0x7da1b1790940>]]
call[name[_run_with_memory_scaling], parameter[name[params], name[tx_out_file], name[data]]]
return[name[out_file]]
|
keyword[def] identifier[_run_collect_allelic_counts] ( identifier[pos_file] , identifier[pos_name] , identifier[work_dir] , identifier[data] ):
literal[string]
identifier[out_dir] = identifier[utils] . identifier[safe_makedir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dd] . identifier[get_work_dir] ( identifier[data] ), literal[string] , literal[string] ))
identifier[out_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[out_dir] , literal[string] %( identifier[dd] . identifier[get_sample_name] ( identifier[data] ), identifier[pos_name] ))
keyword[if] keyword[not] identifier[utils] . identifier[file_exists] ( identifier[out_file] ):
keyword[with] identifier[file_transaction] ( identifier[data] , identifier[out_file] ) keyword[as] identifier[tx_out_file] :
identifier[params] =[ literal[string] , literal[string] , literal[string] , identifier[pos_file] , literal[string] , identifier[dd] . identifier[get_align_bam] ( identifier[data] ),
literal[string] , identifier[dd] . identifier[get_ref_file] ( identifier[data] ), literal[string] , identifier[tx_out_file] ]
identifier[_run_with_memory_scaling] ( identifier[params] , identifier[tx_out_file] , identifier[data] )
keyword[return] identifier[out_file]
|
def _run_collect_allelic_counts(pos_file, pos_name, work_dir, data):
"""Counts by alleles for a specific sample and set of positions.
"""
out_dir = utils.safe_makedir(os.path.join(dd.get_work_dir(data), 'structural', 'counts'))
out_file = os.path.join(out_dir, '%s-%s-counts.tsv' % (dd.get_sample_name(data), pos_name))
if not utils.file_exists(out_file):
with file_transaction(data, out_file) as tx_out_file:
params = ['-T', 'CollectAllelicCounts', '-L', pos_file, '-I', dd.get_align_bam(data), '-R', dd.get_ref_file(data), '-O', tx_out_file]
_run_with_memory_scaling(params, tx_out_file, data) # depends on [control=['with'], data=['tx_out_file']] # depends on [control=['if'], data=[]]
return out_file
|